QCamera3HWI.cpp revision c504c0c7f125a9b6fee2e375af85432a6e1d9b51
/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <ui/Fence.h>
46#include <gralloc_priv.h>
47#include "QCamera3HWI.h"
48#include "QCamera3Mem.h"
49#include "QCamera3Channel.h"
50#include "QCamera3PostProc.h"
51#include "QCamera3VendorTags.h"
52
53using namespace android;
54
55namespace qcamera {
56
// Convenience accessor for the CPU-mapped pointer of buffer INDEX inside a
// camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline tuning constants. NOTE(review): consumed elsewhere in this file
// (not in view here); presumably they back the corresponding android.*
// static metadata entries — confirm at the use sites.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum pixel values for 8/10/12-bit sensor sample depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions, used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Per-configuration stream-count limits enforced by this HAL.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (4)
// int32 entries per metering region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5

// Element count of a statically sized lookup table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Aggregate post-processing feature mask applied to HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                                CAM_QCOM_FEATURE_CROP |\
                                                CAM_QCOM_FEATURE_ROTATION |\
                                                CAM_QCOM_FEATURE_SHARPNESS |\
                                                CAM_QCOM_FEATURE_SCALE |\
                                                CAM_QCOM_FEATURE_CAC )
89
// Per-sensor capability tables, one slot per camera id. NOTE(review):
// presumably populated during camera-module init before any
// QCamera3HardwareInterface is constructed — the constructor dereferences
// its slot unconditionally.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static camera_metadata per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Serializes access to HAL-global state shared across camera instances.
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
// Runtime-adjustable log verbosity; volatile so property-driven updates are
// observed without synchronization.
volatile uint32_t gCamHal3LogLevel = 1;

// Maps CDS setting strings ("On"/"Off"/"Auto") to backend CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
100
// Android color-effect enum -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android AWB enum -> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene-mode enum -> backend scene mode. DISABLED maps to OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_DISABLED,       CAM_SCENE_MODE_OFF},
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android AF enum -> backend focus mode. AF_MODE_OFF appears twice (OFF and
// FIXED) so reverse lookups resolve both backend modes to AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android chromatic-aberration-correction enum -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding enum -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE mode -> flash mode implied by that AE mode (AE_MODE_ON means
// flash stays off; the auto/always/redeye variants drive the flash).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android explicit flash-mode enum -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect enum -> backend face-detect mode (SIMPLE not mapped).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus-distance-calibration enum -> backend calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens-state enum -> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Advertised JPEG thumbnail sizes as flat (width, height) pairs;
// the leading (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Android sensor test-pattern enum -> backend test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested frame rate -> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
281
// camera3_device_ops vtable handed to the camera framework (uses the GCC
// "label:" designated-initializer extension). NULL entries are callbacks
// this HAL does not implement.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
293
294/*===========================================================================
295 * FUNCTION   : QCamera3HardwareInterface
296 *
297 * DESCRIPTION: constructor of QCamera3HardwareInterface
298 *
299 * PARAMETERS :
300 *   @cameraId  : camera ID
301 *
302 * RETURN     : none
303 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE)
{
    // Refresh log verbosity first so subsequent debug macros honor it
    // (getLogLevel() is defined elsewhere in this file).
    getLogLevel();
    // Fill in the camera3_device_t the framework talks to; this HAL
    // advertises device API 3.3.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check — assumes capabilities were loaded before construction; confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request bookkeeping primitives; mMutex guards all request state.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start them all empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL module is optional; absence is logged but not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif

    // Debug raw-dump support is gated on a persistent system property.
    char prop[PROPERTY_VALUE_MAX];
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);
}
372
373/*===========================================================================
374 * FUNCTION   : ~QCamera3HardwareInterface
375 *
376 * DESCRIPTION: destructor of QCamera3HardwareInterface
377 *
378 * PARAMETERS : none
379 *
380 * RETURN     : none
381 *==========================================================================*/
382QCamera3HardwareInterface::~QCamera3HardwareInterface()
383{
384    CDBG("%s: E", __func__);
385    /* We need to stop all streams before deleting any stream */
386
387
388    if (mRawDumpChannel) {
389        mRawDumpChannel->stop();
390    }
391
392    // NOTE: 'camera3_stream_t *' objects are already freed at
393    //        this stage by the framework
394    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
395        it != mStreamInfo.end(); it++) {
396        QCamera3Channel *channel = (*it)->channel;
397        if (channel) {
398            channel->stop();
399        }
400    }
401    if (mSupportChannel)
402        mSupportChannel->stop();
403
404    if (mAnalysisChannel) {
405        mAnalysisChannel->stop();
406    }
407
408    /* Turn off video hint */
409    updatePowerHint(m_bIsVideo, false);
410
411    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
412        it != mStreamInfo.end(); it++) {
413        QCamera3Channel *channel = (*it)->channel;
414        if (channel)
415            delete channel;
416        free (*it);
417    }
418    if (mSupportChannel) {
419        delete mSupportChannel;
420        mSupportChannel = NULL;
421    }
422
423    if (mAnalysisChannel) {
424        delete mAnalysisChannel;
425        mAnalysisChannel = NULL;
426    }
427    if (mRawDumpChannel) {
428        delete mRawDumpChannel;
429        mRawDumpChannel = NULL;
430    }
431    mPictureChannel = NULL;
432
433    /* Clean up all channels */
434    if (mCameraInitialized) {
435        if (mMetadataChannel) {
436            mMetadataChannel->stop();
437            delete mMetadataChannel;
438            mMetadataChannel = NULL;
439        }
440        if(!mFirstConfiguration){
441            //send the last unconfigure
442            cam_stream_size_info_t stream_config_info;
443            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
444            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
445            stream_config_info.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
446            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
447                    stream_config_info);
448            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
449            if (rc < 0) {
450                ALOGE("%s: set_parms failed for unconfigure", __func__);
451            }
452        }
453        deinitParameters();
454    }
455
456    if (mCameraOpened)
457        closeCamera();
458
459    mPendingBuffersMap.mPendingBufferList.clear();
460    mPendingReprocessResultList.clear();
461    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
462                i != mPendingRequestsList.end(); i++) {
463        clearInputBuffer(i->input_buffer);
464        i = mPendingRequestsList.erase(i);
465    }
466    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
467        if (mDefaultMetadata[i])
468            free_camera_metadata(mDefaultMetadata[i]);
469
470    pthread_cond_destroy(&mRequestCond);
471
472    pthread_mutex_destroy(&mMutex);
473    CDBG("%s: X", __func__);
474}
475
476/*===========================================================================
477 * FUNCTION   : camEvtHandle
478 *
479 * DESCRIPTION: Function registered to mm-camera-interface to handle events
480 *
481 * PARAMETERS :
482 *   @camera_handle : interface layer camera handle
483 *   @evt           : ptr to event
484 *   @user_data     : user data ptr
485 *
486 * RETURN     : none
487 *==========================================================================*/
488void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
489                                          mm_camera_event_t *evt,
490                                          void *user_data)
491{
492    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
493    if (obj && evt) {
494        switch(evt->server_event_type) {
495            case CAM_EVENT_TYPE_DAEMON_DIED:
496                ALOGE("%s: Fatal, camera daemon died", __func__);
497                camera3_notify_msg_t notify_msg;
498                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
499                notify_msg.type = CAMERA3_MSG_ERROR;
500                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
501                notify_msg.message.error.error_stream = NULL;
502                notify_msg.message.error.frame_number = 0;
503                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
504                break;
505
506            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
507                CDBG("%s: HAL got request pull from Daemon", __func__);
508                pthread_mutex_lock(&obj->mMutex);
509                obj->mWokenUpByDaemon = true;
510                obj->unblockRequestIfNecessary();
511                pthread_mutex_unlock(&obj->mMutex);
512                break;
513
514            default:
515                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
516                        evt->server_event_type);
517                break;
518        }
519    } else {
520        ALOGE("%s: NULL user_data/evt", __func__);
521    }
522}
523
524/*===========================================================================
525 * FUNCTION   : openCamera
526 *
527 * DESCRIPTION: open camera
528 *
529 * PARAMETERS :
530 *   @hw_device  : double ptr for camera device struct
531 *
532 * RETURN     : int32_t type of status
533 *              NO_ERROR  -- success
534 *              none-zero failure code
535 *==========================================================================*/
536int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
537{
538    int rc = 0;
539    if (mCameraOpened) {
540        *hw_device = NULL;
541        return PERMISSION_DENIED;
542    }
543
544    rc = openCamera();
545    if (rc == 0) {
546        *hw_device = &mCameraDevice.common;
547    } else
548        *hw_device = NULL;
549
550    return rc;
551}
552
553/*===========================================================================
554 * FUNCTION   : openCamera
555 *
556 * DESCRIPTION: open camera
557 *
558 * PARAMETERS : none
559 *
560 * RETURN     : int32_t type of status
561 *              NO_ERROR  -- success
562 *              none-zero failure code
563 *==========================================================================*/
564int QCamera3HardwareInterface::openCamera()
565{
566    int rc = 0;
567
568    ATRACE_CALL();
569    if (mCameraHandle) {
570        ALOGE("Failure: Camera already opened");
571        return ALREADY_EXISTS;
572    }
573    mCameraHandle = camera_open((uint8_t)mCameraId);
574    if (!mCameraHandle) {
575        ALOGE("camera_open failed.");
576        return UNKNOWN_ERROR;
577    }
578
579    mCameraOpened = true;
580
581    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
582            camEvtHandle, (void *)this);
583
584    if (rc < 0) {
585        ALOGE("%s: Error, failed to register event callback", __func__);
586        /* Not closing camera here since it is already handled in destructor */
587        return FAILED_TRANSACTION;
588    }
589    mFirstConfiguration = true;
590    return NO_ERROR;
591}
592
593/*===========================================================================
594 * FUNCTION   : closeCamera
595 *
596 * DESCRIPTION: close camera
597 *
598 * PARAMETERS : none
599 *
600 * RETURN     : int32_t type of status
601 *              NO_ERROR  -- success
602 *              none-zero failure code
603 *==========================================================================*/
604int QCamera3HardwareInterface::closeCamera()
605{
606    ATRACE_CALL();
607    int rc = NO_ERROR;
608
609    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
610    mCameraHandle = NULL;
611    mCameraOpened = false;
612
613    return rc;
614}
615
616/*===========================================================================
617 * FUNCTION   : initialize
618 *
619 * DESCRIPTION: Initialize frameworks callback functions
620 *
621 * PARAMETERS :
622 *   @callback_ops : callback function to frameworks
623 *
624 * RETURN     :
625 *
626 *==========================================================================*/
627int QCamera3HardwareInterface::initialize(
628        const struct camera3_callback_ops *callback_ops)
629{
630    ATRACE_CALL();
631    int rc;
632
633    pthread_mutex_lock(&mMutex);
634
635    rc = initParameters();
636    if (rc < 0) {
637        ALOGE("%s: initParamters failed %d", __func__, rc);
638       goto err1;
639    }
640    mCallbackOps = callback_ops;
641
642    pthread_mutex_unlock(&mMutex);
643    mCameraInitialized = true;
644    return 0;
645
646err1:
647    pthread_mutex_unlock(&mMutex);
648    return rc;
649}
650
651/*===========================================================================
652 * FUNCTION   : validateStreamDimensions
653 *
654 * DESCRIPTION: Check if the configuration requested are those advertised
655 *
656 * PARAMETERS :
657 *   @stream_list : streams to be configured
658 *
659 * RETURN     :
660 *
661 *==========================================================================*/
662int QCamera3HardwareInterface::validateStreamDimensions(
663        camera3_stream_configuration_t *streamList)
664{
665    int rc = NO_ERROR;
666    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
667    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
668    size_t count = 0;
669
670    camera3_stream_t *inputStream = NULL;
671    /*
672    * Loop through all streams to find input stream if it exists*
673    */
674    for (size_t i = 0; i< streamList->num_streams; i++) {
675        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
676            if (inputStream != NULL) {
677                ALOGE("%s: Error, Multiple input streams requested");
678                return -EINVAL;
679            }
680            inputStream = streamList->streams[i];
681        }
682    }
683    /*
684    * Loop through all streams requested in configuration
685    * Check if unsupported sizes have been requested on any of them
686    */
687    for (size_t j = 0; j < streamList->num_streams; j++) {
688        bool sizeFound = false;
689        size_t jpeg_sizes_cnt = 0;
690        camera3_stream_t *newStream = streamList->streams[j];
691
692        uint32_t rotatedHeight = newStream->height;
693        uint32_t rotatedWidth = newStream->width;
694        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
695                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
696            rotatedHeight = newStream->width;
697            rotatedWidth = newStream->height;
698        }
699
700        /*
701        * Sizes are different for each type of stream format check against
702        * appropriate table.
703        */
704        switch (newStream->format) {
705        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
706        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
707        case HAL_PIXEL_FORMAT_RAW10:
708            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
709            for (size_t i = 0; i < count; i++) {
710                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
711                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
712                    sizeFound = true;
713                    break;
714                }
715            }
716            break;
717        case HAL_PIXEL_FORMAT_BLOB:
718            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
719            /* Generate JPEG sizes table */
720            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
721                    count,
722                    MAX_SIZES_CNT,
723                    available_processed_sizes);
724            jpeg_sizes_cnt = filterJpegSizes(
725                    available_jpeg_sizes,
726                    available_processed_sizes,
727                    count * 2,
728                    MAX_SIZES_CNT * 2,
729                    gCamCapability[mCameraId]->active_array_size,
730                    gCamCapability[mCameraId]->max_downscale_factor);
731
732            /* Verify set size against generated sizes table */
733            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
734                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
735                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
736                    sizeFound = true;
737                    break;
738                }
739            }
740            break;
741
742
743        case HAL_PIXEL_FORMAT_YCbCr_420_888:
744        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
745        default:
746            /* ZSL stream will be full active array size validate that*/
747            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
748                || newStream->stream_type == CAMERA3_STREAM_INPUT
749                || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {
750                if (((int32_t)rotatedWidth ==
751                            gCamCapability[mCameraId]->active_array_size.width) &&
752                        ((int32_t)rotatedHeight ==
753                                gCamCapability[mCameraId]->active_array_size.height)) {
754                    sizeFound = true;
755                }
756                /* We could potentially break here to enforce ZSL stream
757                 * set from frameworks always has full active array size
758                 * but it is not clear from spec if framework will always
759                 * follow that, also we have logic to override to full array
760                 * size, so keeping this logic lenient at the moment.
761                 */
762            }
763
764            /* Non ZSL stream still need to conform to advertised sizes*/
765            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
766                    MAX_SIZES_CNT);
767            for (size_t i = 0; i < count; i++) {
768                if (((int32_t)rotatedWidth ==
769                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
770                        ((int32_t)rotatedHeight ==
771                                gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
772                    sizeFound = true;
773                break;
774                }
775            }
776            break;
777        } /* End of switch(newStream->format) */
778
779        /* We error out even if a single stream has unsupported size set */
780        if (!sizeFound) {
781            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
782                  "type:%d", __func__, rotatedWidth, rotatedHeight,
783                  newStream->format);
784            ALOGE("%s: Active array size is  %d x %d", __func__,
785                    gCamCapability[mCameraId]->active_array_size.width,
786                    gCamCapability[mCameraId]->active_array_size.height);
787            rc = -EINVAL;
788            break;
789        }
790    } /* End of for each stream */
791    return rc;
792}
793
794/*==============================================================================
795 * FUNCTION   : isSupportChannelNeeded
796 *
 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
798 *
799 * PARAMETERS :
800 *   @stream_list : streams to be configured
801 *
 * RETURN     : Boolean true/false decision
803 *
804 *==========================================================================*/
805bool QCamera3HardwareInterface::isSupportChannelNeeded(camera3_stream_configuration_t *streamList,
806        cam_stream_size_info_t stream_config_info)
807{
808    uint32_t i;
809    bool bSuperSetPresent = false;
810    /* Check for conditions where PProc pipeline does not have any streams*/
811    for (i = 0; i < stream_config_info.num_streams; i++) {
812        if (stream_config_info.postprocess_mask[i] == CAM_QCOM_FEATURE_PP_SUPERSET) {
813            bSuperSetPresent = true;
814            break;
815        }
816    }
817
818    if (bSuperSetPresent == false )
819        return true;
820
821    /* Dummy stream needed if only raw or jpeg streams present */
822    for (i = 0;i < streamList->num_streams;i++) {
823        switch(streamList->streams[i]->format) {
824            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
825            case HAL_PIXEL_FORMAT_RAW10:
826            case HAL_PIXEL_FORMAT_RAW16:
827            case HAL_PIXEL_FORMAT_BLOB:
828                break;
829            default:
830                return false;
831        }
832    }
833    return true;
834}
835
836/*==============================================================================
837 * FUNCTION   : getSensorOutputSize
838 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
840 *
841 * PARAMETERS :
842 *   @sensor_dim : sensor output dimension (output)
843 *
844 * RETURN     : int32_t type of status
845 *              NO_ERROR  -- success
 *              non-zero failure code
847 *
848 *==========================================================================*/
849int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
850{
851    int32_t rc = NO_ERROR;
852
853    cam_dimension_t max_dim = {0, 0};
854    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
855        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
856            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
857        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
858            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
859    }
860
861    clear_metadata_buffer(mParameters);
862
863    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
864            max_dim);
865    if (rc != NO_ERROR) {
866        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
867        return rc;
868    }
869
870    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
871    if (rc != NO_ERROR) {
872        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
873        return rc;
874    }
875
876    clear_metadata_buffer(mParameters);
877    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
878
879    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
880            mParameters);
881    if (rc != NO_ERROR) {
882        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
883        return rc;
884    }
885
886    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
887    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
888
889    return rc;
890}
891
892/*==============================================================================
893 * FUNCTION   : updatePowerHint
894 *
895 * DESCRIPTION: update power hint based on whether it's video mode or not.
896 *
897 * PARAMETERS :
898 *   @bWasVideo : whether video mode before the switch
899 *   @bIsVideo  : whether new mode is video or not.
900 *
 * RETURN     : None
902 *
903 *==========================================================================*/
904void QCamera3HardwareInterface::updatePowerHint(bool bWasVideo, bool bIsVideo)
905{
906#ifdef HAS_MULTIMEDIA_HINTS
907    if (bWasVideo == bIsVideo)
908        return;
909
910    if (m_pPowerModule && m_pPowerModule->powerHint) {
911        if (bIsVideo)
912            m_pPowerModule->powerHint(m_pPowerModule,
913                    POWER_HINT_VIDEO_ENCODE, (void *)"state=1");
914        else
915            m_pPowerModule->powerHint(m_pPowerModule,
916                    POWER_HINT_VIDEO_ENCODE, (void *)"state=0");
917     }
918#endif
919}
920
921/*===========================================================================
922 * FUNCTION   : configureStreams
923 *
924 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
925 *              and output streams.
926 *
927 * PARAMETERS :
928 *   @stream_list : streams to be configured
929 *
930 * RETURN     :
931 *
932 *==========================================================================*/
933int QCamera3HardwareInterface::configureStreams(
934        camera3_stream_configuration_t *streamList)
935{
936    ATRACE_CALL();
937    int rc = 0;
938    bool bWasVideo = m_bIsVideo;
939    uint32_t numBuffers = MAX_INFLIGHT_REQUESTS;
940
941    // Sanity check stream_list
942    if (streamList == NULL) {
943        ALOGE("%s: NULL stream configuration", __func__);
944        return BAD_VALUE;
945    }
946    if (streamList->streams == NULL) {
947        ALOGE("%s: NULL stream list", __func__);
948        return BAD_VALUE;
949    }
950
951    if (streamList->num_streams < 1) {
952        ALOGE("%s: Bad number of streams requested: %d", __func__,
953                streamList->num_streams);
954        return BAD_VALUE;
955    }
956
957    if (streamList->num_streams >= MAX_NUM_STREAMS) {
958        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
959                MAX_NUM_STREAMS, streamList->num_streams);
960        return BAD_VALUE;
961    }
962
963    mOpMode = streamList->operation_mode;
964    CDBG("%s: mOpMode: %d", __func__, mOpMode);
965
    /* first invalidate all the streams in the mStreamList
967     * if they appear again, they will be validated */
968    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
969            it != mStreamInfo.end(); it++) {
970        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
971        channel->stop();
972        (*it)->status = INVALID;
973    }
974
975    if (mRawDumpChannel) {
976        mRawDumpChannel->stop();
977        delete mRawDumpChannel;
978        mRawDumpChannel = NULL;
979    }
980
981    if (mSupportChannel)
982        mSupportChannel->stop();
983
984    if (mAnalysisChannel) {
985        mAnalysisChannel->stop();
986    }
987    if (mMetadataChannel) {
988        /* If content of mStreamInfo is not 0, there is metadata stream */
989        mMetadataChannel->stop();
990    }
991
992    pthread_mutex_lock(&mMutex);
993
994    /* Check whether we have video stream */
995    m_bIs4KVideo = false;
996    m_bIsVideo = false;
997    m_bEisSupportedSize = false;
998    bool isZsl = false;
999    uint32_t videoWidth = 0U;
1000    uint32_t videoHeight = 0U;
1001    size_t rawStreamCnt = 0;
1002    size_t stallStreamCnt = 0;
1003    size_t processedStreamCnt = 0;
1004    // Number of streams on ISP encoder path
1005    size_t numStreamsOnEncoder = 0;
1006    cam_dimension_t maxViewfinderSize;
1007    bool bJpegExceeds4K = false;
1008    bool bUseCommonFeatureMask = false;
1009    uint32_t commonFeatureMask = 0;
1010    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1011    camera3_stream_t *inputStream = NULL;
1012
1013    /*EIS configuration*/
1014    bool eisSupported = false;
1015    bool oisSupported = false;
1016    int32_t margin_index = -1;
1017    uint8_t eis_prop_set;
1018    uint32_t maxEisWidth = 0;
1019    uint32_t maxEisHeight = 0;
1020    int32_t hal_version = CAM_HAL_V3;
1021
1022    size_t count = IS_TYPE_MAX;
1023    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1024    for (size_t i = 0; i < count; i++) {
1025        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1026            eisSupported = true;
1027            margin_index = (int32_t)i;
1028            break;
1029        }
1030    }
1031
1032    count = CAM_OPT_STAB_MAX;
1033    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1034    for (size_t i = 0; i < count; i++) {
1035        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1036            oisSupported = true;
1037            break;
1038        }
1039    }
1040
1041    if (eisSupported) {
1042        maxEisWidth = (uint32_t)
1043            ((gCamCapability[mCameraId]->active_array_size.width * 1.0) /
1044            (1+ gCamCapability[mCameraId]->supported_is_type_margins[margin_index]));
1045         maxEisHeight = (uint32_t)
1046            ((gCamCapability[mCameraId]->active_array_size.height * 1.0) /
1047            (1+ gCamCapability[mCameraId]->supported_is_type_margins[margin_index]));
1048    }
1049
1050    /* EIS setprop control */
1051    char eis_prop[PROPERTY_VALUE_MAX];
1052    memset(eis_prop, 0, sizeof(eis_prop));
1053    property_get("camera.eis.enable", eis_prop, "0");
1054    eis_prop_set = (uint8_t)atoi(eis_prop);
1055
1056    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported);
1057
1058    /* stream configurations */
1059    for (size_t i = 0; i < streamList->num_streams; i++) {
1060        camera3_stream_t *newStream = streamList->streams[i];
1061        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1062                "height = %d, rotation = %d",
1063                __func__, i, newStream->stream_type, newStream->format,
1064                newStream->width, newStream->height, newStream->rotation);
1065        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1066                newStream->stream_type == CAMERA3_STREAM_INPUT){
1067            isZsl = true;
1068        }
1069        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1070            inputStream = newStream;
1071        }
1072
1073        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1074            if (newStream->width > VIDEO_4K_WIDTH ||
1075                    newStream->height > VIDEO_4K_HEIGHT)
1076                bJpegExceeds4K = true;
1077        }
1078
1079        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1080                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1081            m_bIsVideo = true;
1082            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1083                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1084                videoWidth = newStream->width;
1085                videoHeight = newStream->height;
1086                m_bIs4KVideo = true;
1087            }
1088            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1089                                  (newStream->height <= maxEisHeight);
1090        }
1091        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1092                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1093            switch (newStream->format) {
1094            case HAL_PIXEL_FORMAT_BLOB:
1095                stallStreamCnt++;
1096                if (((int32_t)newStream->width > maxViewfinderSize.width) ||
1097                        ((int32_t)newStream->height > maxViewfinderSize.height)) {
1098                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1099                    numStreamsOnEncoder++;
1100                }
1101                break;
1102            case HAL_PIXEL_FORMAT_RAW10:
1103            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1104            case HAL_PIXEL_FORMAT_RAW16:
1105                rawStreamCnt++;
1106                break;
1107            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1108                processedStreamCnt++;
1109                if (((int32_t)newStream->width > maxViewfinderSize.width) ||
1110                        ((int32_t)newStream->height > maxViewfinderSize.height)) {
1111                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1112                            newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {
1113                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1114                    } else {
1115                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1116                    }
1117                    numStreamsOnEncoder++;
1118                }
1119                break;
1120            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1121            default:
1122                processedStreamCnt++;
1123                if (((int32_t)newStream->width > maxViewfinderSize.width) ||
1124                        ((int32_t)newStream->height > maxViewfinderSize.height)) {
1125                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1126                    numStreamsOnEncoder++;
1127                }
1128                break;
1129            }
1130
1131        }
1132    }
1133
1134    /* Check if num_streams is sane */
1135    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1136            rawStreamCnt > MAX_RAW_STREAMS ||
1137            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1138        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1139                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1140        pthread_mutex_unlock(&mMutex);
1141        return -EINVAL;
1142    }
1143    /* Check whether we have zsl stream or 4k video case */
1144    if (isZsl && m_bIsVideo) {
1145        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1146        pthread_mutex_unlock(&mMutex);
1147        return -EINVAL;
1148    }
1149    /* Check if stream sizes are sane */
1150    if (numStreamsOnEncoder > 2) {
1151        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1152                __func__);
1153        pthread_mutex_unlock(&mMutex);
1154        return -EINVAL;
1155    } else if (1 < numStreamsOnEncoder){
1156        bUseCommonFeatureMask = true;
1157        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1158                __func__);
1159    }
1160    /* Check if BLOB size is greater than 4k in 4k recording case */
1161    if (m_bIs4KVideo && bJpegExceeds4K) {
1162        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1163                __func__);
1164        pthread_mutex_unlock(&mMutex);
1165        return -EINVAL;
1166    }
1167
1168    rc = validateStreamDimensions(streamList);
1169    if (rc == NO_ERROR) {
1170        rc = validateStreamRotations(streamList);
1171    }
1172    if (rc != NO_ERROR) {
1173        ALOGE("%s: Invalid stream configuration requested!", __func__);
1174        pthread_mutex_unlock(&mMutex);
1175        return rc;
1176    }
1177
1178    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1179    camera3_stream_t *jpegStream = NULL;
1180    for (size_t i = 0; i < streamList->num_streams; i++) {
1181        camera3_stream_t *newStream = streamList->streams[i];
1182        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1183                "stream size : %d x %d, stream rotation = %d",
1184                __func__, newStream->stream_type, newStream->format,
1185                newStream->width, newStream->height, newStream->rotation);
1186        //if the stream is in the mStreamList validate it
1187        bool stream_exists = false;
1188        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1189                it != mStreamInfo.end(); it++) {
1190            if ((*it)->stream == newStream) {
1191                QCamera3Channel *channel =
1192                    (QCamera3Channel*)(*it)->stream->priv;
1193                stream_exists = true;
1194                if (channel)
1195                    delete channel;
1196                (*it)->status = VALID;
1197                (*it)->stream->priv = NULL;
1198                (*it)->channel = NULL;
1199            }
1200        }
1201        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1202            //new stream
1203            stream_info_t* stream_info;
1204            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1205            if (!stream_info) {
1206               ALOGE("%s: Could not allocate stream info", __func__);
1207               rc = -ENOMEM;
1208               pthread_mutex_unlock(&mMutex);
1209               return rc;
1210            }
1211            stream_info->stream = newStream;
1212            stream_info->status = VALID;
1213            stream_info->channel = NULL;
1214            mStreamInfo.push_back(stream_info);
1215        }
1216        /* Covers Opaque ZSL and API1 F/W ZSL */
1217        if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL
1218                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1219            if (zslStream != NULL) {
1220                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1221                pthread_mutex_unlock(&mMutex);
1222                return BAD_VALUE;
1223            }
1224            zslStream = newStream;
1225        }
1226        /* Covers YUV reprocess */
1227        if (inputStream != NULL) {
1228            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1229                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1230                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1231                    && inputStream->width == newStream->width
1232                    && inputStream->height == newStream->height) {
1233                if (zslStream != NULL) {
1234                    /* This scenario indicates multiple YUV streams with same size
1235                     * as input stream have been requested, since zsl stream handle
1236                     * is solely use for the purpose of overriding the size of streams
1237                     * which share h/w streams we will just make a guess here as to
1238                     * which of the stream is a ZSL stream, this will be refactored
1239                     * once we make generic logic for streams sharing encoder output
1240                     */
1241                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1242                }
1243                zslStream = newStream;
1244            }
1245        }
1246        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1247            jpegStream = newStream;
1248        }
1249    }
1250
1251    cleanAndSortStreamInfo();
1252    if (mMetadataChannel) {
1253        delete mMetadataChannel;
1254        mMetadataChannel = NULL;
1255    }
1256    if (mSupportChannel) {
1257        delete mSupportChannel;
1258        mSupportChannel = NULL;
1259    }
1260
1261    if (mAnalysisChannel) {
1262        delete mAnalysisChannel;
1263        mAnalysisChannel = NULL;
1264    }
1265
1266    //Create metadata channel and initialize it
1267    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1268                    mCameraHandle->ops, captureResultCb,
1269                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1270    if (mMetadataChannel == NULL) {
1271        ALOGE("%s: failed to allocate metadata channel", __func__);
1272        rc = -ENOMEM;
1273        pthread_mutex_unlock(&mMutex);
1274        return rc;
1275    }
1276    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1277    if (rc < 0) {
1278        ALOGE("%s: metadata channel initialization failed", __func__);
1279        delete mMetadataChannel;
1280        mMetadataChannel = NULL;
1281        pthread_mutex_unlock(&mMutex);
1282        return rc;
1283    }
1284
1285    /* Create analysis stream if h/w support is available */
1286    if (gCamCapability[mCameraId]->hw_analysis_supported) {
1287        mAnalysisChannel = new QCamera3SupportChannel(
1288                mCameraHandle->camera_handle,
1289                mCameraHandle->ops,
1290                &gCamCapability[mCameraId]->padding_info,
1291                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1292                CAM_STREAM_TYPE_ANALYSIS,
1293                &gCamCapability[mCameraId]->analysis_recommended_res,
1294                this);
1295        if (!mAnalysisChannel) {
1296            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1297            pthread_mutex_unlock(&mMutex);
1298            return -ENOMEM;
1299        }
1300    }
1301
1302    bool isRawStreamRequested = false;
1303    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1304    /* Allocate channel objects for the requested streams */
1305    for (size_t i = 0; i < streamList->num_streams; i++) {
1306        camera3_stream_t *newStream = streamList->streams[i];
1307        uint32_t stream_usage = newStream->usage;
1308        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1309        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1310        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1311                || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) &&
1312            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
1313            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1314            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1315        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1316                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1317        } else {
1318            //for non zsl streams find out the format
1319            switch (newStream->format) {
1320            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1321              {
1322                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1323                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1324                 } else {
1325                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1326                 }
1327                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1328                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1329
1330                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1331                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1332                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1333                             newStream->height;
1334                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1335                             newStream->width;
1336                 }
1337              }
1338              break;
1339           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1340              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1341              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1342              break;
1343           case HAL_PIXEL_FORMAT_BLOB:
1344              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1345              if (m_bIs4KVideo && !isZsl) {
1346                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1347                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1348              } else {
1349                  if (bUseCommonFeatureMask &&
1350                          (((int32_t)newStream->width > maxViewfinderSize.width) ||
1351                                  ((int32_t)newStream->height > maxViewfinderSize.height))) {
1352                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1353                  } else {
1354                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1355                  }
1356              }
1357              if (isZsl) {
1358                  if (zslStream) {
1359                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1360                              (int32_t)zslStream->width;
1361                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1362                              (int32_t)zslStream->height;
1363                  } else {
1364                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1365                      pthread_mutex_unlock(&mMutex);
1366                      return -EINVAL;
1367                  }
1368              } else if (m_bIs4KVideo) {
1369                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1370                          (int32_t)videoWidth;
1371                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1372                          (int32_t)videoHeight;
1373              }
1374              break;
1375           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1376           case HAL_PIXEL_FORMAT_RAW16:
1377           case HAL_PIXEL_FORMAT_RAW10:
1378              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1379              isRawStreamRequested = true;
1380              break;
1381           default:
1382              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1383              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1384              break;
1385            }
1386
1387        }
1388        if (newStream->priv == NULL) {
1389            //New stream, construct channel
1390            switch (newStream->stream_type) {
1391            case CAMERA3_STREAM_INPUT:
1392                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1393                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1394                break;
1395            case CAMERA3_STREAM_BIDIRECTIONAL:
1396                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1397                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1398                break;
1399            case CAMERA3_STREAM_OUTPUT:
1400                /* For video encoding stream, set read/write rarely
1401                 * flag so that they may be set to un-cached */
1402                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1403                    newStream->usage |=
1404                         (GRALLOC_USAGE_SW_READ_RARELY |
1405                         GRALLOC_USAGE_SW_WRITE_RARELY |
1406                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1407                else if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
1408                    CDBG("%s: ZSL usage flag skipping", __func__);
1409                else
1410                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1411                break;
1412            default:
1413                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1414                break;
1415            }
1416
1417            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1418                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1419                QCamera3Channel *channel = NULL;
1420                switch (newStream->format) {
1421                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1422                    /* use higher number of buffers for HFR mode */
1423                    if((newStream->format ==
1424                            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) &&
1425                            (newStream->usage &
1426                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1427                            (streamList->operation_mode ==
1428                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1429                    ) {
1430                        numBuffers = MAX_INFLIGHT_REQUESTS * MAX_HFR_BATCH_SIZE;
1431                        ALOGI("%s: num video buffers in HFR mode: %d",
1432                                __func__, numBuffers);
1433                    }
1434                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1435                            mCameraHandle->ops, captureResultCb,
1436                            &gCamCapability[mCameraId]->padding_info,
1437                            this,
1438                            newStream,
1439                            (cam_stream_type_t)
1440                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1441                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1442                            mMetadataChannel,
1443                            numBuffers);
1444                    if (channel == NULL) {
1445                        ALOGE("%s: allocation of channel failed", __func__);
1446                        pthread_mutex_unlock(&mMutex);
1447                        return -ENOMEM;
1448                    }
1449                    newStream->max_buffers = channel->getNumBuffers();
1450                    newStream->priv = channel;
1451                    break;
1452                case HAL_PIXEL_FORMAT_YCbCr_420_888:
1453                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1454                            mCameraHandle->ops, captureResultCb,
1455                            &gCamCapability[mCameraId]->padding_info,
1456                            this,
1457                            newStream,
1458                            (cam_stream_type_t)
1459                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1460                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1461                            mMetadataChannel);
1462                    if (channel == NULL) {
1463                        ALOGE("%s: allocation of YUV channel failed", __func__);
1464                        pthread_mutex_unlock(&mMutex);
1465                        return -ENOMEM;
1466                    }
1467                    newStream->max_buffers = channel->getNumBuffers();
1468                    newStream->priv = channel;
1469                    break;
1470                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1471                case HAL_PIXEL_FORMAT_RAW16:
1472                case HAL_PIXEL_FORMAT_RAW10:
1473                    mRawChannel = new QCamera3RawChannel(
1474                            mCameraHandle->camera_handle,
1475                            mCameraHandle->ops, captureResultCb,
1476                            &gCamCapability[mCameraId]->padding_info,
1477                            this, newStream, CAM_QCOM_FEATURE_NONE,
1478                            mMetadataChannel,
1479                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1480                    if (mRawChannel == NULL) {
1481                        ALOGE("%s: allocation of raw channel failed", __func__);
1482                        pthread_mutex_unlock(&mMutex);
1483                        return -ENOMEM;
1484                    }
1485                    newStream->max_buffers = mRawChannel->getNumBuffers();
1486                    newStream->priv = (QCamera3Channel*)mRawChannel;
1487                    break;
1488                case HAL_PIXEL_FORMAT_BLOB:
1489                    // Max live snapshot inflight buffer is 1. This is to mitigate
1490                    // frame drop issues for video snapshot. The more buffers being
1491                    // allocated, the more frame drops there are.
1492                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
1493                            mCameraHandle->ops, captureResultCb,
1494                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1495                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1496                            m_bIs4KVideo, mMetadataChannel,
1497                            (m_bIsVideo ? 1 : MAX_INFLIGHT_REQUESTS));
1498                    if (mPictureChannel == NULL) {
1499                        ALOGE("%s: allocation of channel failed", __func__);
1500                        pthread_mutex_unlock(&mMutex);
1501                        return -ENOMEM;
1502                    }
1503                    newStream->priv = (QCamera3Channel*)mPictureChannel;
1504                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1505                    break;
1506
1507                default:
1508                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1509                    break;
1510                }
1511            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1512                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1513            } else {
1514                ALOGE("%s: Error, Unknown stream type", __func__);
1515                return -EINVAL;
1516            }
1517
1518            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1519                    it != mStreamInfo.end(); it++) {
1520                if ((*it)->stream == newStream) {
1521                    (*it)->channel = (QCamera3Channel*) newStream->priv;
1522                    break;
1523                }
1524            }
1525        } else {
1526            // Channel already exists for this stream
1527            // Do nothing for now
1528        }
1529
        /* Do not add entries for input stream in metastream info
         * since there is no real stream associated with it
         */
1533        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1534            mStreamConfigInfo.num_streams++;
1535    }
1536
1537    if (isZsl) {
1538        if (zslStream == NULL) {
1539            ALOGE("%s: Error Zsl stream handle missing", __func__);
1540            pthread_mutex_unlock(&mMutex);
1541            return -EINVAL;
1542        }
        /* This override is possible since the f/w guarantees that the ZSL
           stream will always be the active array size in case of Bidirectional
           or will be limited to the max i/p stream size which we can control to
           be equal to be the largest YUV/Opaque stream size
           */
1548        if (mPictureChannel) {
1549           mPictureChannel->overrideYuvSize(zslStream->width, zslStream->height);
1550        }
1551    } else if (mPictureChannel && m_bIs4KVideo) {
1552        mPictureChannel->overrideYuvSize(videoWidth, videoHeight);
1553    }
1554
1555    //RAW DUMP channel
1556    if (mEnableRawDump && isRawStreamRequested == false){
1557        cam_dimension_t rawDumpSize;
1558        rawDumpSize = getMaxRawSize(mCameraId);
1559        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1560                                  mCameraHandle->ops,
1561                                  rawDumpSize,
1562                                  &gCamCapability[mCameraId]->padding_info,
1563                                  this, CAM_QCOM_FEATURE_NONE);
1564        if (!mRawDumpChannel) {
1565            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1566            pthread_mutex_unlock(&mMutex);
1567            return -ENOMEM;
1568        }
1569    }
1570
1571
1572    if (mAnalysisChannel) {
1573        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1574                gCamCapability[mCameraId]->analysis_recommended_res;
1575        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1576                CAM_STREAM_TYPE_ANALYSIS;
1577        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1578                CAM_QCOM_FEATURE_FACE_DETECTION;
1579        mStreamConfigInfo.num_streams++;
1580    }
1581
1582    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1583        mSupportChannel = new QCamera3SupportChannel(
1584                mCameraHandle->camera_handle,
1585                mCameraHandle->ops,
1586                &gCamCapability[mCameraId]->padding_info,
1587                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1588                CAM_STREAM_TYPE_CALLBACK,
1589                &QCamera3SupportChannel::kDim,
1590                this);
1591        if (!mSupportChannel) {
1592            ALOGE("%s: dummy channel cannot be created", __func__);
1593            pthread_mutex_unlock(&mMutex);
1594            return -ENOMEM;
1595        }
1596    }
1597
1598    if (mSupportChannel) {
1599        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1600                QCamera3SupportChannel::kDim;
1601        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1602                CAM_STREAM_TYPE_CALLBACK;
1603        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1604                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1605        mStreamConfigInfo.num_streams++;
1606    }
1607
1608    if (mRawDumpChannel) {
1609        cam_dimension_t rawSize;
1610        rawSize = getMaxRawSize(mCameraId);
1611        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1612                rawSize;
1613        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1614                CAM_STREAM_TYPE_RAW;
1615        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1616                CAM_QCOM_FEATURE_NONE;
1617        mStreamConfigInfo.num_streams++;
1618    }
1619    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1620    mStreamConfigInfo.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
1621
1622    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1623    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
1624                i != mPendingRequestsList.end(); i++) {
1625        clearInputBuffer(i->input_buffer);
1626        i = mPendingRequestsList.erase(i);
1627    }
1628    mPendingFrameDropList.clear();
1629    // Initialize/Reset the pending buffers list
1630    mPendingBuffersMap.num_buffers = 0;
1631    mPendingBuffersMap.mPendingBufferList.clear();
1632    mPendingReprocessResultList.clear();
1633
1634    mFirstRequest = true;
1635    //Get min frame duration for this streams configuration
1636    deriveMinFrameDuration();
1637
1638    /* Turn on video hint only if video stream is configured */
1639    updatePowerHint(bWasVideo, m_bIsVideo);
1640
1641    pthread_mutex_unlock(&mMutex);
1642    return rc;
1643}
1644
1645/*===========================================================================
1646 * FUNCTION   : validateCaptureRequest
1647 *
1648 * DESCRIPTION: validate a capture request from camera service
1649 *
1650 * PARAMETERS :
1651 *   @request : request from framework to process
1652 *
1653 * RETURN     :
1654 *
1655 *==========================================================================*/
1656int QCamera3HardwareInterface::validateCaptureRequest(
1657                    camera3_capture_request_t *request)
1658{
1659    ssize_t idx = 0;
1660    const camera3_stream_buffer_t *b;
1661    CameraMetadata meta;
1662
1663    /* Sanity check the request */
1664    if (request == NULL) {
1665        ALOGE("%s: NULL capture request", __func__);
1666        return BAD_VALUE;
1667    }
1668
1669    if (request->settings == NULL && mFirstRequest) {
1670        /*settings cannot be null for the first request*/
1671        return BAD_VALUE;
1672    }
1673
1674    uint32_t frameNumber = request->frame_number;
1675    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1676        ALOGE("%s: Request %d: No output buffers provided!",
1677                __FUNCTION__, frameNumber);
1678        return BAD_VALUE;
1679    }
1680    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
1681        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
1682                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
1683        return BAD_VALUE;
1684    }
1685    if (request->input_buffer != NULL) {
1686        b = request->input_buffer;
1687        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1688            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1689                    __func__, frameNumber, (long)idx);
1690            return BAD_VALUE;
1691        }
1692        if (b->release_fence != -1) {
1693            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1694                    __func__, frameNumber, (long)idx);
1695            return BAD_VALUE;
1696        }
1697        if (b->buffer == NULL) {
1698            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1699                    __func__, frameNumber, (long)idx);
1700            return BAD_VALUE;
1701        }
1702    }
1703
1704    // Validate all buffers
1705    b = request->output_buffers;
1706    do {
1707        QCamera3Channel *channel =
1708                static_cast<QCamera3Channel*>(b->stream->priv);
1709        if (channel == NULL) {
1710            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1711                    __func__, frameNumber, (long)idx);
1712            return BAD_VALUE;
1713        }
1714        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1715            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1716                    __func__, frameNumber, (long)idx);
1717            return BAD_VALUE;
1718        }
1719        if (b->release_fence != -1) {
1720            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1721                    __func__, frameNumber, (long)idx);
1722            return BAD_VALUE;
1723        }
1724        if (b->buffer == NULL) {
1725            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1726                    __func__, frameNumber, (long)idx);
1727            return BAD_VALUE;
1728        }
1729        if (*(b->buffer) == NULL) {
1730            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
1731                    __func__, frameNumber, (long)idx);
1732            return BAD_VALUE;
1733        }
1734        idx++;
1735        b = request->output_buffers + idx;
1736    } while (idx < (ssize_t)request->num_output_buffers);
1737
1738    return NO_ERROR;
1739}
1740
1741/*===========================================================================
1742 * FUNCTION   : deriveMinFrameDuration
1743 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
1745 *              on currently configured streams.
1746 *
1747 * PARAMETERS : NONE
1748 *
1749 * RETURN     : NONE
1750 *
1751 *==========================================================================*/
1752void QCamera3HardwareInterface::deriveMinFrameDuration()
1753{
1754    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
1755
1756    maxJpegDim = 0;
1757    maxProcessedDim = 0;
1758    maxRawDim = 0;
1759
1760    // Figure out maximum jpeg, processed, and raw dimensions
1761    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1762        it != mStreamInfo.end(); it++) {
1763
1764        // Input stream doesn't have valid stream_type
1765        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
1766            continue;
1767
1768        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
1769        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1770            if (dimension > maxJpegDim)
1771                maxJpegDim = dimension;
1772        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1773                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1774                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
1775            if (dimension > maxRawDim)
1776                maxRawDim = dimension;
1777        } else {
1778            if (dimension > maxProcessedDim)
1779                maxProcessedDim = dimension;
1780        }
1781    }
1782
1783    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
1784            MAX_SIZES_CNT);
1785
1786    //Assume all jpeg dimensions are in processed dimensions.
1787    if (maxJpegDim > maxProcessedDim)
1788        maxProcessedDim = maxJpegDim;
1789    //Find the smallest raw dimension that is greater or equal to jpeg dimension
1790    if (maxProcessedDim > maxRawDim) {
1791        maxRawDim = INT32_MAX;
1792
1793        for (size_t i = 0; i < count; i++) {
1794            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
1795                    gCamCapability[mCameraId]->raw_dim[i].height;
1796            if (dimension >= maxProcessedDim && dimension < maxRawDim)
1797                maxRawDim = dimension;
1798        }
1799    }
1800
1801    //Find minimum durations for processed, jpeg, and raw
1802    for (size_t i = 0; i < count; i++) {
1803        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
1804                gCamCapability[mCameraId]->raw_dim[i].height) {
1805            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
1806            break;
1807        }
1808    }
1809    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1810    for (size_t i = 0; i < count; i++) {
1811        if (maxProcessedDim ==
1812                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
1813                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
1814            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1815            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
1816            break;
1817        }
1818    }
1819}
1820
1821/*===========================================================================
1822 * FUNCTION   : getMinFrameDuration
1823 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
1830 *
1831 *==========================================================================*/
1832int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
1833{
1834    bool hasJpegStream = false;
1835    bool hasRawStream = false;
1836    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
1837        const camera3_stream_t *stream = request->output_buffers[i].stream;
1838        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
1839            hasJpegStream = true;
1840        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1841                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1842                stream->format == HAL_PIXEL_FORMAT_RAW16)
1843            hasRawStream = true;
1844    }
1845
1846    if (!hasJpegStream)
1847        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
1848    else
1849        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
1850}
1851
1852/*===========================================================================
1853 * FUNCTION   : handlePendingReprocResults
1854 *
1855 * DESCRIPTION: check and notify on any pending reprocess results
1856 *
1857 * PARAMETERS :
1858 *   @frame_number   : Pending request frame number
1859 *
1860 * RETURN     : int32_t type of status
1861 *              NO_ERROR  -- success
1862 *              none-zero failure code
1863 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a held-back reprocess result matching this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was cached with the reprocess
            // result until this frame could be completed.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its cached settings and
            // input buffer can be attached to the capture result.
            for (List<PendingRequestInfo>::iterator k = mPendingRequestsList.begin();
                k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Assemble the capture result: the single reprocessed
                    // output buffer plus the request's input buffer/settings.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // Reported as the final partial result for this request.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully serviced: release its input buffer and
                    // remove it from the pending bookkeeping.
                    clearInputBuffer(k->input_buffer);
                    mPendingRequestsList.erase(k);
                    mPendingRequest--;
                    break;
                }
            }
            // Drop the consumed reprocess result; at most one entry matches.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
1904
1905/*===========================================================================
1906 * FUNCTION   : handleBatchMetadata
1907 *
1908 * DESCRIPTION: Handles metadata buffer callback in batch mode
1909 *
1910 * PARAMETERS : @metadata_buf: metadata buffer
1911 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
1912 *                 the meta buf in this method
1913 *
1914 * RETURN     :
1915 *
1916 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        ALOGE("%s: metadata_buf is NULL", __func__);
        return;
    }
    // In batch (HFR) mode one metadata buffer stands for mBatchSize frames.
    // The backend reports only the LAST frame's numbers/timestamp; values
    // for the earlier frames in the batch are inferred in the loop below.
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t last_frame_number, last_urgent_frame_number;
    uint32_t frame_number, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // Any missing field marks the whole buffer invalid, but processing still
    // has to continue so the buffer can be buf-done'd at the end.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    // If reported capture_time is 0, skip handling this metadata
    // NOTE(review): invalid_metadata also leaves last_frame_capture_time at
    // 0, so that case takes this early-out too — which appears to conflict
    // with the comment below about calling handleMetadataWithLock even for
    // invalid metadata. Confirm this is the intended behavior.
    if (!last_frame_capture_time) {
        goto done_batch_metadata;
    }

    for (size_t i = 0; i < mBatchSize; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                // Frames in a batch are consecutive, so frame i's number is
                // last_number - (mBatchSize - 1) + i.
                urgent_frame_number =
                        last_urgent_frame_number + 1 - mBatchSize + i;
                CDBG("%s: last urgent frame_number in batch: %d, "
                        "inferred urgent frame_number: %d",
                        __func__, last_urgent_frame_number, urgent_frame_number);
                // Patch the inferred value back into the shared metadata
                // buffer before handing it to handleMetadataWithLock.
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                frame_number = last_frame_number + 1 - mBatchSize + i;
                CDBG("%s: last frame_number in batch: %d, "
                        "inferred frame_number: %d",
                        __func__, last_frame_number, frame_number);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_FRAME_NUMBER, frame_number);
            }

            //Infer timestamp
            // Frames are assumed evenly spaced at 1/mHFRVideoFps seconds,
            // counting backwards from the reported last-frame timestamp.
            first_frame_capture_time = last_frame_capture_time -
                    (((mBatchSize - 1) * NSEC_PER_SEC) / mHFRVideoFps);
            capture_time =
                    first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
            ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                    CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
            CDBG("%s: batch capture_time: %lld, capture_time: %lld",
                    __func__, last_frame_capture_time, capture_time);
        }
        // The buffer is shared across all iterations, so buf-done/free is
        // deferred to this function's epilogue (false passed below).
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */);
        pthread_mutex_unlock(&mMutex);
    }

done_batch_metadata:
    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2013
2014/*===========================================================================
2015 * FUNCTION   : handleMetadataWithLock
2016 *
2017 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2018 *
2019 * PARAMETERS : @metadata_buf: metadata buffer
2020 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2021 *                 the meta buf in this method
2022 *
2023 * RETURN     :
2024 *
2025 *==========================================================================*/
2026void QCamera3HardwareInterface::handleMetadataWithLock(
2027    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2028{
2029    ATRACE_CALL();
2030
2031    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2032    int32_t frame_number_valid, urgent_frame_number_valid;
2033    uint32_t frame_number, urgent_frame_number;
2034    int64_t capture_time;
2035
2036    int32_t *p_frame_number_valid =
2037            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2038    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2039    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2040    int32_t *p_urgent_frame_number_valid =
2041            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2042    uint32_t *p_urgent_frame_number =
2043            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2044    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2045            metadata) {
2046        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
2047                __func__, *p_frame_number_valid, *p_frame_number);
2048    }
2049
2050    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2051            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2052        ALOGE("%s: Invalid metadata", __func__);
2053        if (free_and_bufdone_meta_buf) {
2054            mMetadataChannel->bufDone(metadata_buf);
2055            free(metadata_buf);
2056        }
2057        goto done_metadata;
2058    } else {
2059        frame_number_valid = *p_frame_number_valid;
2060        frame_number = *p_frame_number;
2061        capture_time = *p_capture_time;
2062        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2063        urgent_frame_number = *p_urgent_frame_number;
2064    }
2065    //Partial result on process_capture_result for timestamp
2066    if (urgent_frame_number_valid) {
2067        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
2068          __func__, urgent_frame_number, capture_time);
2069
2070        //Recieved an urgent Frame Number, handle it
2071        //using partial results
2072        for (List<PendingRequestInfo>::iterator i =
2073            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2074            CDBG("%s: Iterator Frame = %d urgent frame = %d",
2075                __func__, i->frame_number, urgent_frame_number);
2076
2077            if (i->frame_number < urgent_frame_number &&
2078                i->partial_result_cnt == 0) {
2079                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
2080                    __func__, i->frame_number);
2081            }
2082
2083            if (i->frame_number == urgent_frame_number &&
2084                     i->bUrgentReceived == 0) {
2085
2086                camera3_capture_result_t result;
2087                memset(&result, 0, sizeof(camera3_capture_result_t));
2088
2089                i->partial_result_cnt++;
2090                i->bUrgentReceived = 1;
2091                // Extract 3A metadata
2092                result.result =
2093                    translateCbUrgentMetadataToResultMetadata(metadata);
2094                // Populate metadata result
2095                result.frame_number = urgent_frame_number;
2096                result.num_output_buffers = 0;
2097                result.output_buffers = NULL;
2098                result.partial_result = i->partial_result_cnt;
2099
2100                mCallbackOps->process_capture_result(mCallbackOps, &result);
2101                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
2102                     __func__, result.frame_number, capture_time);
2103                free_camera_metadata((camera_metadata_t *)result.result);
2104                break;
2105            }
2106        }
2107    }
2108
2109    if (!frame_number_valid) {
2110        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
2111        if (free_and_bufdone_meta_buf) {
2112            mMetadataChannel->bufDone(metadata_buf);
2113            free(metadata_buf);
2114        }
2115        goto done_metadata;
2116    }
2117    CDBG("%s: valid frame_number = %u, capture_time = %lld", __func__,
2118            frame_number, capture_time);
2119
2120    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2121        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2122        camera3_capture_result_t result;
2123        memset(&result, 0, sizeof(camera3_capture_result_t));
2124
2125        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
2126        i->partial_result_cnt++;
2127        result.partial_result = i->partial_result_cnt;
2128
2129        // Flush out all entries with less or equal frame numbers.
2130        mPendingRequest--;
2131
2132        // Check whether any stream buffer corresponding to this is dropped or not
2133        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2134        // The API does not expect a blob buffer to be dropped
2135        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2136            /* Clear notify_msg structure */
2137            camera3_notify_msg_t notify_msg;
2138            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2139            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2140                    j != i->buffers.end(); j++) {
2141               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
2142                   QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
2143                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2144                   for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2145                       if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2146                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2147                           CDBG("%s: Start of reporting error frame#=%u, streamID=%u",
2148                                   __func__, i->frame_number, streamID);
2149                           notify_msg.type = CAMERA3_MSG_ERROR;
2150                           notify_msg.message.error.frame_number = i->frame_number;
2151                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
2152                           notify_msg.message.error.error_stream = j->stream;
2153                           mCallbackOps->notify(mCallbackOps, &notify_msg);
2154                           CDBG("%s: End of reporting error frame#=%u, streamID=%u",
2155                                  __func__, i->frame_number, streamID);
2156                           PendingFrameDropInfo PendingFrameDrop;
2157                           PendingFrameDrop.frame_number=i->frame_number;
2158                           PendingFrameDrop.stream_ID = streamID;
2159                           // Add the Frame drop info to mPendingFrameDropList
2160                           mPendingFrameDropList.push_back(PendingFrameDrop);
2161                      }
2162                   }
2163               }
2164            }
2165        }
2166
2167        //TODO: batch handling for dropped metadata
2168
2169        // Send empty metadata with already filled buffers for dropped metadata
2170        // and send valid metadata with already filled buffers for current metadata
2171        if (i->frame_number < frame_number) {
2172            /* Clear notify_msg structure */
2173            camera3_notify_msg_t notify_msg;
2174            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2175
2176            notify_msg.type = CAMERA3_MSG_SHUTTER;
2177            notify_msg.message.shutter.frame_number = i->frame_number;
2178            notify_msg.message.shutter.timestamp = (uint64_t)capture_time -
2179                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
2180            mCallbackOps->notify(mCallbackOps, &notify_msg);
2181            i->timestamp = (nsecs_t)notify_msg.message.shutter.timestamp;
2182            CDBG("%s: Support notification !!!! notify frame_number = %u, capture_time = %llu",
2183                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
2184
2185            CameraMetadata dummyMetadata;
2186            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
2187                    &i->timestamp, 1);
2188            dummyMetadata.update(ANDROID_REQUEST_ID,
2189                    &(i->request_id), 1);
2190            result.result = dummyMetadata.release();
2191        } else {
2192            /* Clear notify_msg structure */
2193            camera3_notify_msg_t notify_msg;
2194            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2195
2196            // Send shutter notify to frameworks
2197            notify_msg.type = CAMERA3_MSG_SHUTTER;
2198            notify_msg.message.shutter.frame_number = i->frame_number;
2199            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2200            mCallbackOps->notify(mCallbackOps, &notify_msg);
2201
2202            i->timestamp = capture_time;
2203
2204            result.result = translateFromHalMetadata(metadata,
2205                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2206                    i->capture_intent);
2207
2208            saveExifParams(metadata);
2209
2210            if (i->blob_request) {
2211                {
2212                    //Dump tuning metadata if enabled and available
2213                    char prop[PROPERTY_VALUE_MAX];
2214                    memset(prop, 0, sizeof(prop));
2215                    property_get("persist.camera.dumpmetadata", prop, "0");
2216                    int32_t enabled = atoi(prop);
2217                    if (enabled && metadata->is_tuning_params_valid) {
2218                        dumpMetadataToFile(metadata->tuning_params,
2219                               mMetaFrameCount,
2220                               enabled,
2221                               "Snapshot",
2222                               frame_number);
2223                    }
2224                }
2225
2226
2227                mPictureChannel->queueReprocMetadata(metadata_buf);
2228            } else {
2229                // Return metadata buffer
2230                if (free_and_bufdone_meta_buf) {
2231                    mMetadataChannel->bufDone(metadata_buf);
2232                    free(metadata_buf);
2233                }
2234            }
2235        }
2236        if (!result.result) {
2237            ALOGE("%s: metadata is NULL", __func__);
2238        }
2239        result.frame_number = i->frame_number;
2240        result.input_buffer = i->input_buffer;
2241        result.num_output_buffers = 0;
2242        result.output_buffers = NULL;
2243        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2244                    j != i->buffers.end(); j++) {
2245            if (j->buffer) {
2246                result.num_output_buffers++;
2247            }
2248        }
2249
2250        if (result.num_output_buffers > 0) {
2251            camera3_stream_buffer_t *result_buffers =
2252                new camera3_stream_buffer_t[result.num_output_buffers];
2253            if (!result_buffers) {
2254                ALOGE("%s: Fatal error: out of memory", __func__);
2255            }
2256            size_t result_buffers_idx = 0;
2257            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2258                    j != i->buffers.end(); j++) {
2259                if (j->buffer) {
2260                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2261                            m != mPendingFrameDropList.end(); m++) {
2262                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2263                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2264                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2265                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2266                            CDBG("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
2267                                  __func__, frame_number, streamID);
2268                            m = mPendingFrameDropList.erase(m);
2269                            break;
2270                        }
2271                    }
2272
2273                    for (List<PendingBufferInfo>::iterator k =
2274                      mPendingBuffersMap.mPendingBufferList.begin();
2275                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
2276                      if (k->buffer == j->buffer->buffer) {
2277                        CDBG("%s: Found buffer %p in pending buffer List "
2278                              "for frame %u, Take it out!!", __func__,
2279                               k->buffer, k->frame_number);
2280                        mPendingBuffersMap.num_buffers--;
2281                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
2282                        break;
2283                      }
2284                    }
2285
2286                    result_buffers[result_buffers_idx++] = *(j->buffer);
2287                    free(j->buffer);
2288                    j->buffer = NULL;
2289                }
2290            }
2291            result.output_buffers = result_buffers;
2292            mCallbackOps->process_capture_result(mCallbackOps, &result);
2293            CDBG("%s: meta frame_number = %u, capture_time = %lld",
2294                    __func__, result.frame_number, i->timestamp);
2295            free_camera_metadata((camera_metadata_t *)result.result);
2296            delete[] result_buffers;
2297        } else {
2298            mCallbackOps->process_capture_result(mCallbackOps, &result);
2299            CDBG("%s: meta frame_number = %u, capture_time = %lld",
2300                        __func__, result.frame_number, i->timestamp);
2301            free_camera_metadata((camera_metadata_t *)result.result);
2302        }
2303        // erase the element from the list
2304        clearInputBuffer(i->input_buffer);
2305        i = mPendingRequestsList.erase(i);
2306
2307        if (!mPendingReprocessResultList.empty()) {
2308            handlePendingReprocResults(frame_number + 1);
2309        }
2310    }
2311
2312done_metadata:
2313    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2314        i != mPendingRequestsList.end() ;i++) {
2315        i->pipeline_depth++;
2316    }
2317    unblockRequestIfNecessary();
2318
2319}
2320
2321/*===========================================================================
2322 * FUNCTION   : handleBufferWithLock
2323 *
2324 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2325 *
2326 * PARAMETERS : @buffer: image buffer for the callback
2327 *              @frame_number: frame number of the image buffer
2328 *
2329 * RETURN     :
2330 *
2331 *==========================================================================*/
2332void QCamera3HardwareInterface::handleBufferWithLock(
2333    camera3_stream_buffer_t *buffer, uint32_t frame_number)
2334{
2335    ATRACE_CALL();
2336    // If the frame number doesn't exist in the pending request list,
2337    // directly send the buffer to the frameworks, and update pending buffers map
2338    // Otherwise, book-keep the buffer.
2339    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
2340    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2341        i++;
2342    }
2343    if (i == mPendingRequestsList.end()) {
2344        // Verify all pending requests frame_numbers are greater
2345        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
2346                j != mPendingRequestsList.end(); j++) {
2347            if (j->frame_number < frame_number) {
2348                ALOGE("%s: Error: pending frame number %d is smaller than %d",
2349                        __func__, j->frame_number, frame_number);
2350            }
2351        }
2352        camera3_capture_result_t result;
2353        memset(&result, 0, sizeof(camera3_capture_result_t));
2354        result.result = NULL;
2355        result.frame_number = frame_number;
2356        result.num_output_buffers = 1;
2357        result.partial_result = 0;
2358        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2359                m != mPendingFrameDropList.end(); m++) {
2360            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
2361            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2362            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
2363                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2364                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
2365                        __func__, frame_number, streamID);
2366                m = mPendingFrameDropList.erase(m);
2367                break;
2368            }
2369        }
2370        result.output_buffers = buffer;
2371        CDBG("%s: result frame_number = %d, buffer = %p",
2372                __func__, frame_number, buffer->buffer);
2373
2374        for (List<PendingBufferInfo>::iterator k =
2375                mPendingBuffersMap.mPendingBufferList.begin();
2376                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2377            if (k->buffer == buffer->buffer) {
2378                CDBG("%s: Found Frame buffer, take it out from list",
2379                        __func__);
2380
2381                mPendingBuffersMap.num_buffers--;
2382                k = mPendingBuffersMap.mPendingBufferList.erase(k);
2383                break;
2384            }
2385        }
2386        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2387            __func__, mPendingBuffersMap.num_buffers);
2388
2389        mCallbackOps->process_capture_result(mCallbackOps, &result);
2390    } else {
2391        if (i->input_buffer) {
2392            CameraMetadata settings;
2393            camera3_notify_msg_t notify_msg;
2394            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2395            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2396            if(i->settings) {
2397                settings = i->settings;
2398                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2399                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2400                } else {
2401                    ALOGE("%s: No timestamp in input settings! Using current one.",
2402                            __func__);
2403                }
2404            } else {
2405                ALOGE("%s: Input settings missing!", __func__);
2406            }
2407
2408            notify_msg.type = CAMERA3_MSG_SHUTTER;
2409            notify_msg.message.shutter.frame_number = frame_number;
2410            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2411
2412            sp<Fence> releaseFence = new Fence(i->input_buffer->release_fence);
2413            int32_t rc = releaseFence->wait(Fence::TIMEOUT_NEVER);
2414            if (rc != OK) {
2415                ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
2416            }
2417
2418            for (List<PendingBufferInfo>::iterator k =
2419                    mPendingBuffersMap.mPendingBufferList.begin();
2420                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2421                if (k->buffer == buffer->buffer) {
2422                    CDBG("%s: Found Frame buffer, take it out from list",
2423                            __func__);
2424
2425                    mPendingBuffersMap.num_buffers--;
2426                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
2427                    break;
2428                }
2429            }
2430            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2431                __func__, mPendingBuffersMap.num_buffers);
2432
2433            bool notifyNow = true;
2434            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
2435                    j != mPendingRequestsList.end(); j++) {
2436                if (j->frame_number < frame_number) {
2437                    notifyNow = false;
2438                    break;
2439                }
2440            }
2441
2442            if (notifyNow) {
2443                camera3_capture_result result;
2444                memset(&result, 0, sizeof(camera3_capture_result));
2445                result.frame_number = frame_number;
2446                result.result = i->settings;
2447                result.input_buffer = i->input_buffer;
2448                result.num_output_buffers = 1;
2449                result.output_buffers = buffer;
2450                result.partial_result = PARTIAL_RESULT_COUNT;
2451
2452                mCallbackOps->notify(mCallbackOps, &notify_msg);
2453                mCallbackOps->process_capture_result(mCallbackOps, &result);
2454                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
2455                clearInputBuffer(i->input_buffer);
2456                i = mPendingRequestsList.erase(i);
2457                mPendingRequest--;
2458            } else {
2459                // Cache reprocess result for later
2460                PendingReprocessResult pendingResult;
2461                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
2462                pendingResult.notify_msg = notify_msg;
2463                pendingResult.buffer = *buffer;
2464                pendingResult.frame_number = frame_number;
2465                mPendingReprocessResultList.push_back(pendingResult);
2466                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
2467            }
2468        } else {
2469            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2470                j != i->buffers.end(); j++) {
2471                if (j->stream == buffer->stream) {
2472                    if (j->buffer != NULL) {
2473                        ALOGE("%s: Error: buffer is already set", __func__);
2474                    } else {
2475                        j->buffer = (camera3_stream_buffer_t *)malloc(
2476                            sizeof(camera3_stream_buffer_t));
2477                        *(j->buffer) = *buffer;
2478                        CDBG("%s: cache buffer %p at result frame_number %d",
2479                            __func__, buffer, frame_number);
2480                    }
2481                }
2482            }
2483        }
2484    }
2485}
2486
2487/*===========================================================================
2488 * FUNCTION   : unblockRequestIfNecessary
2489 *
2490 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2491 *              that mMutex is held when this function is called.
2492 *
2493 * PARAMETERS :
2494 *
2495 * RETURN     :
2496 *
2497 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Wake one thread waiting on mRequestCond (per the function contract,
   // mMutex is already held by the caller when this is invoked).
   pthread_cond_signal(&mRequestCond);
}
2503
2504/*===========================================================================
2505 * FUNCTION   : processCaptureRequest
2506 *
2507 * DESCRIPTION: process a capture request from camera service
2508 *
2509 * PARAMETERS :
2510 *   @request : request from framework to process
2511 *
2512 * RETURN     :
2513 *
2514 *==========================================================================*/
2515int QCamera3HardwareInterface::processCaptureRequest(
2516                    camera3_capture_request_t *request)
2517{
2518    ATRACE_CALL();
2519    int rc = NO_ERROR;
2520    int32_t request_id;
2521    CameraMetadata meta;
2522    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
2523    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
2524    bool isVidBufRequested = false;
2525    camera3_stream_buffer_t *pInputBuffer;
2526
2527    pthread_mutex_lock(&mMutex);
2528
2529    rc = validateCaptureRequest(request);
2530    if (rc != NO_ERROR) {
2531        ALOGE("%s: incoming request is not valid", __func__);
2532        pthread_mutex_unlock(&mMutex);
2533        return rc;
2534    }
2535
2536    meta = request->settings;
2537
2538    // For first capture request, send capture intent, and
2539    // stream on all streams
2540    if (mFirstRequest) {
2541        // send an unconfigure to the backend so that the isp
2542        // resources are deallocated
2543        if (!mFirstConfiguration) {
2544            cam_stream_size_info_t stream_config_info;
2545            int32_t hal_version = CAM_HAL_V3;
2546            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
2547            stream_config_info.buffer_info.min_buffers =
2548                    MIN_INFLIGHT_REQUESTS;
2549            stream_config_info.buffer_info.max_buffers =
2550                    MAX_INFLIGHT_REQUESTS;
2551            clear_metadata_buffer(mParameters);
2552            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2553                    CAM_INTF_PARM_HAL_VERSION, hal_version);
2554            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2555                    CAM_INTF_META_STREAM_INFO, stream_config_info);
2556            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2557                    mParameters);
2558            if (rc < 0) {
2559                ALOGE("%s: set_parms for unconfigure failed", __func__);
2560                pthread_mutex_unlock(&mMutex);
2561                return rc;
2562            }
2563        }
2564
2565        /* get eis information for stream configuration */
2566        cam_is_type_t is_type;
2567        char is_type_value[PROPERTY_VALUE_MAX];
2568        property_get("camera.is_type", is_type_value, "0");
2569        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2570
2571        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2572            int32_t hal_version = CAM_HAL_V3;
2573            uint8_t captureIntent =
2574                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2575            mCaptureIntent = captureIntent;
2576            clear_metadata_buffer(mParameters);
2577            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
2578            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
2579        }
2580
2581        //If EIS is enabled, turn it on for video
2582        bool setEis = m_bEisEnable && m_bEisSupportedSize &&
2583            ((mCaptureIntent ==  CAMERA3_TEMPLATE_VIDEO_RECORD) ||
2584             (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT));
2585        int32_t vsMode;
2586        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
2587        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
2588            rc = BAD_VALUE;
2589        }
2590
2591        //IS type will be 0 unless EIS is supported. If EIS is supported
2592        //it could either be 1 or 4 depending on the stream and video size
2593        if (setEis){
2594            if (!m_bEisSupportedSize) {
2595                is_type = IS_TYPE_DIS;
2596            } else {
2597                is_type = IS_TYPE_EIS_2_0;
2598            }
2599        }
2600
2601        if (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_RECORD) {
2602            mStreamConfigInfo.is_type = is_type;
2603        } else {
2604            mStreamConfigInfo.is_type = IS_TYPE_NONE;
2605        }
2606
2607        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2608                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
2609        int32_t tintless_value = 1;
2610        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2611                CAM_INTF_PARM_TINTLESS, tintless_value);
2612
2613        setMobicat();
2614
2615        /* Set fps and hfr mode while sending meta stream info so that sensor
2616         * can configure appropriate streaming mode */
2617        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2618            rc = setHalFpsRange(meta, mParameters);
2619            if (rc != NO_ERROR) {
2620                ALOGE("%s: setHalFpsRange failed", __func__);
2621            }
2622        }
2623        if (meta.exists(ANDROID_CONTROL_MODE)) {
2624            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
2625            rc = extractSceneMode(meta, metaMode, mParameters);
2626            if (rc != NO_ERROR) {
2627                ALOGE("%s: extractSceneMode failed", __func__);
2628            }
2629        }
2630
2631        //TODO: validate the arguments, HSV scenemode should have only the
2632        //advertised fps ranges
2633
        /* Set the capture intent, HAL version, tintless, stream info,
         * and DIS enable parameters to the backend */
2636        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
2637        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2638                    mParameters);
2639
2640        cam_dimension_t sensor_dim;
2641        memset(&sensor_dim, 0, sizeof(sensor_dim));
2642        rc = getSensorOutputSize(sensor_dim);
2643        if (rc != NO_ERROR) {
2644            ALOGE("%s: Failed to get sensor output size", __func__);
2645            pthread_mutex_unlock(&mMutex);
2646            return rc;
2647        }
2648
2649        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
2650                gCamCapability[mCameraId]->active_array_size.height,
2651                sensor_dim.width, sensor_dim.height);
2652
2653        /* Set batchmode before initializing channel. Since registerBuffer
2654         * internally initializes some of the channels, better set batchmode
2655         * even before first register buffer */
2656        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2657            it != mStreamInfo.end(); it++) {
2658            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2659            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
2660                    && mBatchSize) {
2661                rc = channel->setBatchSize(mBatchSize);
2662                //Disable per frame map unmap for HFR/batchmode case
2663                rc |= channel->setPerFrameMapUnmap(false);
2664                if (NO_ERROR != rc) {
2665                    ALOGE("%s : Channel init failed %d", __func__, rc);
2666                    pthread_mutex_unlock(&mMutex);
2667                    return rc;
2668                }
2669            }
2670        }
2671
2672        for (size_t i = 0; i < request->num_output_buffers; i++) {
2673            const camera3_stream_buffer_t& output = request->output_buffers[i];
2674            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2675            /*for livesnapshot stream is_type will be DIS*/
2676            if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2677                rc = channel->registerBuffer(output.buffer, IS_TYPE_DIS);
2678            } else {
2679                rc = channel->registerBuffer(output.buffer, is_type);
2680            }
2681            if (rc < 0) {
2682                ALOGE("%s: registerBuffer failed",
2683                        __func__);
2684                pthread_mutex_unlock(&mMutex);
2685                return -ENODEV;
2686            }
2687        }
2688
2689        //First initialize all streams
2690        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2691            it != mStreamInfo.end(); it++) {
2692            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2693            if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2694                rc = channel->initialize(IS_TYPE_DIS);
2695            } else {
2696                rc = channel->initialize(is_type);
2697            }
2698            if (NO_ERROR != rc) {
2699                ALOGE("%s : Channel initialization failed %d", __func__, rc);
2700                pthread_mutex_unlock(&mMutex);
2701                return rc;
2702            }
2703        }
2704
2705        if (mRawDumpChannel) {
2706            rc = mRawDumpChannel->initialize(is_type);
2707            if (rc != NO_ERROR) {
2708                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
2709                pthread_mutex_unlock(&mMutex);
2710                return rc;
2711            }
2712        }
2713        if (mSupportChannel) {
2714            rc = mSupportChannel->initialize(is_type);
2715            if (rc < 0) {
2716                ALOGE("%s: Support channel initialization failed", __func__);
2717                pthread_mutex_unlock(&mMutex);
2718                return rc;
2719            }
2720        }
2721        if (mAnalysisChannel) {
2722            rc = mAnalysisChannel->initialize(is_type);
2723            if (rc < 0) {
2724                ALOGE("%s: Analysis channel initialization failed", __func__);
2725                pthread_mutex_unlock(&mMutex);
2726                return rc;
2727            }
2728        }
2729
2730        //Then start them.
2731        CDBG_HIGH("%s: Start META Channel", __func__);
2732        rc = mMetadataChannel->start();
2733        if (rc < 0) {
2734            ALOGE("%s: META channel start failed", __func__);
2735            pthread_mutex_unlock(&mMutex);
2736            return rc;
2737        }
2738
2739        if (mAnalysisChannel) {
2740            rc = mAnalysisChannel->start();
2741            if (rc < 0) {
2742                ALOGE("%s: Analysis channel start failed", __func__);
2743                mMetadataChannel->stop();
2744                pthread_mutex_unlock(&mMutex);
2745                return rc;
2746            }
2747        }
2748
2749        if (mSupportChannel) {
2750            rc = mSupportChannel->start();
2751            if (rc < 0) {
2752                ALOGE("%s: Support channel start failed", __func__);
2753                mMetadataChannel->stop();
                /* Although support and analysis are mutually exclusive today,
                   adding it in any case for future-proofing */
2756                if (mAnalysisChannel) {
2757                    mAnalysisChannel->stop();
2758                }
2759                pthread_mutex_unlock(&mMutex);
2760                return rc;
2761            }
2762        }
2763        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2764            it != mStreamInfo.end(); it++) {
2765            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2766            CDBG_HIGH("%s: Start Processing Channel mask=%d",
2767                    __func__, channel->getStreamTypeMask());
2768            rc = channel->start();
2769            if (rc < 0) {
2770                ALOGE("%s: channel start failed", __func__);
2771                pthread_mutex_unlock(&mMutex);
2772                return rc;
2773            }
2774        }
2775
2776        if (mRawDumpChannel) {
2777            CDBG("%s: Starting raw dump stream",__func__);
2778            rc = mRawDumpChannel->start();
2779            if (rc != NO_ERROR) {
2780                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
2781                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2782                      it != mStreamInfo.end(); it++) {
2783                    QCamera3Channel *channel =
2784                        (QCamera3Channel *)(*it)->stream->priv;
2785                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
2786                        channel->getStreamTypeMask());
2787                    channel->stop();
2788                }
2789                if (mSupportChannel)
2790                    mSupportChannel->stop();
2791                if (mAnalysisChannel) {
2792                    mAnalysisChannel->stop();
2793                }
2794                mMetadataChannel->stop();
2795                pthread_mutex_unlock(&mMutex);
2796                return rc;
2797            }
2798        }
2799        mWokenUpByDaemon = false;
2800        mPendingRequest = 0;
2801        mFirstConfiguration = false;
2802        mBatchStreamID.num_streams = 0;
2803    }
2804
2805    uint32_t frameNumber = request->frame_number;
2806    cam_stream_ID_t streamID;
2807
2808    if (meta.exists(ANDROID_REQUEST_ID)) {
2809        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
2810        mCurrentRequestId = request_id;
2811        CDBG("%s: Received request with id: %d",__func__, request_id);
2812    } else if (mFirstRequest || mCurrentRequestId == -1){
2813        ALOGE("%s: Unable to find request id field, \
2814                & no previous id available", __func__);
2815        pthread_mutex_unlock(&mMutex);
2816        return NAME_NOT_FOUND;
2817    } else {
2818        CDBG("%s: Re-using old request id", __func__);
2819        request_id = mCurrentRequestId;
2820    }
2821
2822    CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
2823                                    __func__, __LINE__,
2824                                    request->num_output_buffers,
2825                                    request->input_buffer,
2826                                    frameNumber);
2827    // Acquire all request buffers first
2828    streamID.num_streams = 0;
2829    int blob_request = 0;
2830    uint32_t snapshotStreamId = 0;
2831    for (size_t i = 0; i < request->num_output_buffers; i++) {
2832        const camera3_stream_buffer_t& output = request->output_buffers[i];
2833        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2834        sp<Fence> acquireFence = new Fence(output.acquire_fence);
2835
2836        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2837            //Call function to store local copy of jpeg data for encode params.
2838            blob_request = 1;
2839            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
2840        }
2841
2842        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
2843        if (rc != OK) {
2844            ALOGE("%s: fence wait failed %d", __func__, rc);
2845            pthread_mutex_unlock(&mMutex);
2846            return rc;
2847        }
2848
2849        streamID.streamID[streamID.num_streams] =
2850            channel->getStreamID(channel->getStreamTypeMask());
2851        streamID.num_streams++;
2852
2853        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
2854            isVidBufRequested = true;
2855        }
2856    }
2857
2858    if (blob_request && mRawDumpChannel) {
2859        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
2860        streamID.streamID[streamID.num_streams] =
2861            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
2862        streamID.num_streams++;
2863    }
2864
2865    if(request->input_buffer == NULL) {
2866        /* Parse the settings:
2867         * - For every request in NORMAL MODE
2868         * - For every request in HFR mode during preview only case
2869         * - For first request of every batch in HFR mode during video
2870         * recording. In batchmode the same settings except frame number is
2871         * repeated in each request of the batch.
2872         */
2873        if (!mBatchSize ||
2874           (mBatchSize && !isVidBufRequested) ||
2875           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
2876            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
2877            if (rc < 0) {
2878                ALOGE("%s: fail to set frame parameters", __func__);
2879                pthread_mutex_unlock(&mMutex);
2880                return rc;
2881            }
2882        }
2883        /* For batchMode HFR, setFrameParameters is not called for every
2884         * request. But only frame number of the latest request is parsed */
2885        if (mBatchSize && ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2886                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
2887            ALOGE("%s: Failed to set the frame number in the parameters", __func__);
2888            return BAD_VALUE;
2889        }
2890    } else {
2891        sp<Fence> acquireFence = new Fence(request->input_buffer->acquire_fence);
2892
2893        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
2894        if (rc != OK) {
2895            ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
2896            pthread_mutex_unlock(&mMutex);
2897            return rc;
2898        }
2899    }
2900
2901    /* Update pending request list and pending buffers map */
2902    PendingRequestInfo pendingRequest;
2903    pendingRequest.frame_number = frameNumber;
2904    pendingRequest.num_buffers = request->num_output_buffers;
2905    pendingRequest.request_id = request_id;
2906    pendingRequest.blob_request = blob_request;
2907    pendingRequest.timestamp = 0;
2908    pendingRequest.bUrgentReceived = 0;
2909    if (request->input_buffer) {
2910        pendingRequest.input_buffer =
2911                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
2912        memcpy(pendingRequest.input_buffer, request->input_buffer, sizeof(camera3_stream_buffer_t));
2913        pInputBuffer = pendingRequest.input_buffer;
2914    } else {
2915       pendingRequest.input_buffer = NULL;
2916       pInputBuffer = NULL;
2917    }
2918    pendingRequest.settings = request->settings;
2919    pendingRequest.pipeline_depth = 0;
2920    pendingRequest.partial_result_cnt = 0;
2921    extractJpegMetadata(pendingRequest.jpegMetadata, request);
2922
2923    //extract capture intent
2924    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2925        mCaptureIntent =
2926                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2927    }
2928    pendingRequest.capture_intent = mCaptureIntent;
2929
2930    for (size_t i = 0; i < request->num_output_buffers; i++) {
2931        RequestedBufferInfo requestedBuf;
2932        requestedBuf.stream = request->output_buffers[i].stream;
2933        requestedBuf.buffer = NULL;
2934        pendingRequest.buffers.push_back(requestedBuf);
2935
2936        // Add to buffer handle the pending buffers list
2937        PendingBufferInfo bufferInfo;
2938        bufferInfo.frame_number = frameNumber;
2939        bufferInfo.buffer = request->output_buffers[i].buffer;
2940        bufferInfo.stream = request->output_buffers[i].stream;
2941        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
2942        mPendingBuffersMap.num_buffers++;
2943        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
2944        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
2945                __func__, frameNumber, bufferInfo.buffer,
2946                channel->getStreamTypeMask(), bufferInfo.stream->format);
2947    }
2948    CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2949          __func__, mPendingBuffersMap.num_buffers);
2950
2951    mPendingRequestsList.push_back(pendingRequest);
2952
2953    if(mFlush) {
2954        pthread_mutex_unlock(&mMutex);
2955        return NO_ERROR;
2956    }
2957
2958    // Notify metadata channel we receive a request
2959    mMetadataChannel->request(NULL, frameNumber);
2960
2961    if(request->input_buffer != NULL){
2962        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
2963        if (NO_ERROR != rc) {
2964            ALOGE("%s: fail to set reproc parameters", __func__);
2965            pthread_mutex_unlock(&mMutex);
2966            return rc;
2967        }
2968    }
2969
2970    // Call request on other streams
2971    for (size_t i = 0; i < request->num_output_buffers; i++) {
2972        const camera3_stream_buffer_t& output = request->output_buffers[i];
2973        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2974
2975        if (channel == NULL) {
2976            ALOGE("%s: invalid channel pointer for stream", __func__);
2977            continue;
2978        }
2979
2980        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
2981            if(request->input_buffer != NULL){
2982                rc = channel->request(output.buffer, frameNumber,
2983                        pInputBuffer, &mReprocMeta);
2984                if (rc < 0) {
2985                    ALOGE("%s: Fail to request on picture channel", __func__);
2986                    pthread_mutex_unlock(&mMutex);
2987                    return rc;
2988                }
2989            } else {
2990                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
2991                        __LINE__, output.buffer, frameNumber);
2992                if (!request->settings) {
2993                    rc = channel->request(output.buffer, frameNumber,
2994                            NULL, mPrevParameters);
2995                } else {
2996                    rc = channel->request(output.buffer, frameNumber,
2997                            NULL, mParameters);
2998                }
2999                if (rc < 0) {
3000                    ALOGE("%s: Fail to request on picture channel", __func__);
3001                    pthread_mutex_unlock(&mMutex);
3002                    return rc;
3003                }
3004            }
3005        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3006            rc = channel->request(output.buffer, frameNumber,
3007                    pInputBuffer,
3008                    pInputBuffer? &mReprocMeta : mParameters);
3009            if (rc < 0) {
3010                ALOGE("%s: Fail to request on YUV channel", __func__);
3011                pthread_mutex_unlock(&mMutex);
3012                return rc;
3013            }
3014        } else {
3015            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3016                __LINE__, output.buffer, frameNumber);
3017            rc = channel->request(output.buffer, frameNumber);
3018            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3019                    && mBatchSize) {
3020                mToBeQueuedVidBufs++;
3021                if (mToBeQueuedVidBufs == mBatchSize) {
3022                    channel->queueBatchBuf();
3023                }
3024            }
3025            if (rc < 0) {
3026                ALOGE("%s: request failed", __func__);
3027                pthread_mutex_unlock(&mMutex);
3028                return rc;
3029            }
3030        }
3031    }
3032
3033    if(request->input_buffer == NULL) {
3034        /* Set the parameters to backend:
3035         * - For every request in NORMAL MODE
3036         * - For every request in HFR mode during preview only case
3037         * - Once every batch in HFR mode during video recording
3038         */
3039        if (!mBatchSize ||
3040           (mBatchSize && !isVidBufRequested) ||
3041           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3042            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3043                    __func__, mBatchSize, isVidBufRequested,
3044                    mToBeQueuedVidBufs);
3045            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3046                    mParameters);
3047            if (rc < 0) {
3048                ALOGE("%s: set_parms failed", __func__);
3049            }
3050            /* reset to zero coz, the batch is queued */
3051            mToBeQueuedVidBufs = 0;
3052        }
3053    }
3054
3055    mFirstRequest = false;
3056    // Added a timed condition wait
3057    struct timespec ts;
3058    uint8_t isValidTimeout = 1;
3059    rc = clock_gettime(CLOCK_REALTIME, &ts);
3060    if (rc < 0) {
3061      isValidTimeout = 0;
3062      ALOGE("%s: Error reading the real time clock!!", __func__);
3063    }
3064    else {
3065      // Make timeout as 5 sec for request to be honored
3066      ts.tv_sec += 5;
3067    }
3068    //Block on conditional variable
3069
3070    mPendingRequest++;
3071    if (mBatchSize) {
3072        /* For HFR, more buffers are dequeued upfront to improve the performance */
3073        minInFlightRequests = (MIN_INFLIGHT_REQUESTS + 1) * mBatchSize;
3074        maxInFlightRequests = MAX_INFLIGHT_REQUESTS * mBatchSize;
3075    }
3076    while (mPendingRequest >= minInFlightRequests) {
3077        if (!isValidTimeout) {
3078            CDBG("%s: Blocking on conditional wait", __func__);
3079            pthread_cond_wait(&mRequestCond, &mMutex);
3080        }
3081        else {
3082            CDBG("%s: Blocking on timed conditional wait", __func__);
3083            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3084            if (rc == ETIMEDOUT) {
3085                rc = -ENODEV;
3086                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3087                break;
3088            }
3089        }
3090        CDBG("%s: Unblocked", __func__);
3091        if (mWokenUpByDaemon) {
3092            mWokenUpByDaemon = false;
3093            if (mPendingRequest < maxInFlightRequests)
3094                break;
3095        }
3096    }
3097    pthread_mutex_unlock(&mMutex);
3098
3099    return rc;
3100}
3101
3102/*===========================================================================
3103 * FUNCTION   : dump
3104 *
3105 * DESCRIPTION:
3106 *
3107 * PARAMETERS :
3108 *
3109 *
3110 * RETURN     :
3111 *==========================================================================*/
3112void QCamera3HardwareInterface::dump(int fd)
3113{
3114    pthread_mutex_lock(&mMutex);
3115    dprintf(fd, "\n Camera HAL3 information Begin \n");
3116
3117    dprintf(fd, "\nNumber of pending requests: %zu \n",
3118        mPendingRequestsList.size());
3119    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3120    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3121    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3122    for(List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
3123        i != mPendingRequestsList.end(); i++) {
3124        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3125        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3126        i->input_buffer);
3127    }
3128    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3129                mPendingBuffersMap.num_buffers);
3130    dprintf(fd, "-------+------------------\n");
3131    dprintf(fd, " Frame | Stream type mask \n");
3132    dprintf(fd, "-------+------------------\n");
3133    for(List<PendingBufferInfo>::iterator i =
3134        mPendingBuffersMap.mPendingBufferList.begin();
3135        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3136        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3137        dprintf(fd, " %5d | %11d \n",
3138                i->frame_number, channel->getStreamTypeMask());
3139    }
3140    dprintf(fd, "-------+------------------\n");
3141
3142    dprintf(fd, "\nPending frame drop list: %zu\n",
3143        mPendingFrameDropList.size());
3144    dprintf(fd, "-------+-----------\n");
3145    dprintf(fd, " Frame | Stream ID \n");
3146    dprintf(fd, "-------+-----------\n");
3147    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3148        i != mPendingFrameDropList.end(); i++) {
3149        dprintf(fd, " %5d | %9d \n",
3150            i->frame_number, i->stream_ID);
3151    }
3152    dprintf(fd, "-------+-----------\n");
3153
3154    dprintf(fd, "\n Camera HAL3 information End \n");
3155
3156    /* use dumpsys media.camera as trigger to send update debug level event */
3157    mUpdateDebugLevel = true;
3158    pthread_mutex_unlock(&mMutex);
3159    return;
3160}
3161
3162/*===========================================================================
3163 * FUNCTION   : flush
3164 *
3165 * DESCRIPTION:
3166 *
3167 * PARAMETERS :
3168 *
3169 *
3170 * RETURN     :
3171 *==========================================================================*/
3172int QCamera3HardwareInterface::flush()
3173{
3174    ATRACE_CALL();
3175    unsigned int frameNum = 0;
3176    camera3_capture_result_t result;
3177    camera3_stream_buffer_t *pStream_Buf = NULL;
3178    FlushMap flushMap;
3179
3180    CDBG("%s: Unblocking Process Capture Request", __func__);
3181    pthread_mutex_lock(&mMutex);
3182    mFlush = true;
3183    pthread_mutex_unlock(&mMutex);
3184
3185    memset(&result, 0, sizeof(camera3_capture_result_t));
3186
3187    // Stop the Streams/Channels
3188    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3189        it != mStreamInfo.end(); it++) {
3190        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3191        channel->stop();
3192        (*it)->status = INVALID;
3193    }
3194
3195    if (mSupportChannel) {
3196        mSupportChannel->stop();
3197    }
3198    if (mAnalysisChannel) {
3199        mAnalysisChannel->stop();
3200    }
3201    if (mRawDumpChannel) {
3202        mRawDumpChannel->stop();
3203    }
3204    if (mMetadataChannel) {
3205        /* If content of mStreamInfo is not 0, there is metadata stream */
3206        mMetadataChannel->stop();
3207    }
3208
3209    // Mutex Lock
3210    pthread_mutex_lock(&mMutex);
3211
3212    // Unblock process_capture_request
3213    mPendingRequest = 0;
3214    pthread_cond_signal(&mRequestCond);
3215
3216    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
3217    frameNum = i->frame_number;
3218    CDBG("%s: Oldest frame num on  mPendingRequestsList = %d",
3219      __func__, frameNum);
3220
3221    // Go through the pending buffers and group them depending
3222    // on frame number
3223    for (List<PendingBufferInfo>::iterator k =
3224            mPendingBuffersMap.mPendingBufferList.begin();
3225            k != mPendingBuffersMap.mPendingBufferList.end();) {
3226
3227        if (k->frame_number < frameNum) {
3228            ssize_t idx = flushMap.indexOfKey(k->frame_number);
3229            if (idx == NAME_NOT_FOUND) {
3230                Vector<PendingBufferInfo> pending;
3231                pending.add(*k);
3232                flushMap.add(k->frame_number, pending);
3233            } else {
3234                Vector<PendingBufferInfo> &pending =
3235                        flushMap.editValueFor(k->frame_number);
3236                pending.add(*k);
3237            }
3238
3239            mPendingBuffersMap.num_buffers--;
3240            k = mPendingBuffersMap.mPendingBufferList.erase(k);
3241        } else {
3242            k++;
3243        }
3244    }
3245
3246    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
3247        uint32_t frame_number = flushMap.keyAt(iFlush);
3248        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
3249
3250        // Send Error notify to frameworks for each buffer for which
3251        // metadata buffer is already sent
3252        CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
3253          __func__, frame_number, pending.size());
3254
3255        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
3256        if (NULL == pStream_Buf) {
3257            ALOGE("%s: No memory for pending buffers array", __func__);
3258            pthread_mutex_unlock(&mMutex);
3259            return NO_MEMORY;
3260        }
3261        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
3262
3263        for (size_t j = 0; j < pending.size(); j++) {
3264            const PendingBufferInfo &info = pending.itemAt(j);
3265            camera3_notify_msg_t notify_msg;
3266            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3267            notify_msg.type = CAMERA3_MSG_ERROR;
3268            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
3269            notify_msg.message.error.error_stream = info.stream;
3270            notify_msg.message.error.frame_number = frame_number;
3271            pStream_Buf[j].acquire_fence = -1;
3272            pStream_Buf[j].release_fence = -1;
3273            pStream_Buf[j].buffer = info.buffer;
3274            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
3275            pStream_Buf[j].stream = info.stream;
3276            mCallbackOps->notify(mCallbackOps, &notify_msg);
3277            CDBG("%s: notify frame_number = %d stream %p", __func__,
3278                    frame_number, info.stream);
3279        }
3280
3281        result.result = NULL;
3282        result.frame_number = frame_number;
3283        result.num_output_buffers = (uint32_t)pending.size();
3284        result.output_buffers = pStream_Buf;
3285        mCallbackOps->process_capture_result(mCallbackOps, &result);
3286
3287        delete [] pStream_Buf;
3288    }
3289
3290    CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
3291
3292    flushMap.clear();
3293    for (List<PendingBufferInfo>::iterator k =
3294            mPendingBuffersMap.mPendingBufferList.begin();
3295            k != mPendingBuffersMap.mPendingBufferList.end();) {
3296        ssize_t idx = flushMap.indexOfKey(k->frame_number);
3297        if (idx == NAME_NOT_FOUND) {
3298            Vector<PendingBufferInfo> pending;
3299            pending.add(*k);
3300            flushMap.add(k->frame_number, pending);
3301        } else {
3302            Vector<PendingBufferInfo> &pending =
3303                    flushMap.editValueFor(k->frame_number);
3304            pending.add(*k);
3305        }
3306
3307        mPendingBuffersMap.num_buffers--;
3308        k = mPendingBuffersMap.mPendingBufferList.erase(k);
3309    }
3310
3311    // Go through the pending requests info and send error request to framework
3312    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
3313        uint32_t frame_number = flushMap.keyAt(iFlush);
3314        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
3315        CDBG("%s:Sending ERROR REQUEST for frame %d",
3316              __func__, frame_number);
3317
3318        // Send shutter notify to frameworks
3319        camera3_notify_msg_t notify_msg;
3320        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3321        notify_msg.type = CAMERA3_MSG_ERROR;
3322        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
3323        notify_msg.message.error.error_stream = NULL;
3324        notify_msg.message.error.frame_number = frame_number;
3325        mCallbackOps->notify(mCallbackOps, &notify_msg);
3326
3327        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
3328        if (NULL == pStream_Buf) {
3329            ALOGE("%s: No memory for pending buffers array", __func__);
3330            pthread_mutex_unlock(&mMutex);
3331            return NO_MEMORY;
3332        }
3333        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
3334
3335        for (size_t j = 0; j < pending.size(); j++) {
3336            const PendingBufferInfo &info = pending.itemAt(j);
3337            pStream_Buf[j].acquire_fence = -1;
3338            pStream_Buf[j].release_fence = -1;
3339            pStream_Buf[j].buffer = info.buffer;
3340            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
3341            pStream_Buf[j].stream = info.stream;
3342        }
3343
3344        result.num_output_buffers = (uint32_t)pending.size();
3345        result.output_buffers = pStream_Buf;
3346        result.result = NULL;
3347        result.frame_number = frame_number;
3348        mCallbackOps->process_capture_result(mCallbackOps, &result);
3349        delete [] pStream_Buf;
3350    }
3351
3352    /* Reset pending buffer list and requests list */
3353    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
3354                i != mPendingRequestsList.end(); i++) {
3355        clearInputBuffer(i->input_buffer);
3356        i = mPendingRequestsList.erase(i);
3357    }
3358    /* Reset pending frame Drop list and requests list */
3359    mPendingFrameDropList.clear();
3360
3361    flushMap.clear();
3362    mPendingBuffersMap.num_buffers = 0;
3363    mPendingBuffersMap.mPendingBufferList.clear();
3364    mPendingReprocessResultList.clear();
3365    CDBG("%s: Cleared all the pending buffers ", __func__);
3366
3367    mFlush = false;
3368
3369    // Start the Streams/Channels
3370    int rc = NO_ERROR;
3371    if (mMetadataChannel) {
3372        /* If content of mStreamInfo is not 0, there is metadata stream */
3373        rc = mMetadataChannel->start();
3374        if (rc < 0) {
3375            ALOGE("%s: META channel start failed", __func__);
3376            pthread_mutex_unlock(&mMutex);
3377            return rc;
3378        }
3379    }
3380    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3381        it != mStreamInfo.end(); it++) {
3382        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3383        rc = channel->start();
3384        if (rc < 0) {
3385            ALOGE("%s: channel start failed", __func__);
3386            pthread_mutex_unlock(&mMutex);
3387            return rc;
3388        }
3389    }
3390    if (mAnalysisChannel) {
3391        mAnalysisChannel->start();
3392    }
3393    if (mSupportChannel) {
3394        rc = mSupportChannel->start();
3395        if (rc < 0) {
3396            ALOGE("%s: Support channel start failed", __func__);
3397            pthread_mutex_unlock(&mMutex);
3398            return rc;
3399        }
3400    }
3401    if (mRawDumpChannel) {
3402        rc = mRawDumpChannel->start();
3403        if (rc < 0) {
3404            ALOGE("%s: RAW dump channel start failed", __func__);
3405            pthread_mutex_unlock(&mMutex);
3406            return rc;
3407        }
3408    }
3409
3410    pthread_mutex_unlock(&mMutex);
3411
3412    return 0;
3413}
3414
3415/*===========================================================================
3416 * FUNCTION   : captureResultCb
3417 *
3418 * DESCRIPTION: Callback handler for all capture result
3419 *              (streams, as well as metadata)
3420 *
3421 * PARAMETERS :
3422 *   @metadata : metadata information
3423 *   @buffer   : actual gralloc buffer to be returned to frameworks.
3424 *               NULL if metadata.
3425 *
3426 * RETURN     : NONE
3427 *==========================================================================*/
3428void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3429                camera3_stream_buffer_t *buffer, uint32_t frame_number)
3430{
3431    if (metadata_buf) {
3432        if (mBatchSize) {
3433            handleBatchMetadata(metadata_buf,
3434                    true /* free_and_bufdone_meta_buf */);
3435        } else { /* mBatchSize = 0 */
3436            pthread_mutex_lock(&mMutex);
3437            handleMetadataWithLock(metadata_buf,
3438                    true /* free_and_bufdone_meta_buf */);
3439            pthread_mutex_unlock(&mMutex);
3440        }
3441    } else {
3442        pthread_mutex_lock(&mMutex);
3443        handleBufferWithLock(buffer, frame_number);
3444        pthread_mutex_unlock(&mMutex);
3445    }
3446    return;
3447}
3448
3449/*===========================================================================
3450 * FUNCTION   : lookupFwkName
3451 *
3452 * DESCRIPTION: In case the enum is not same in fwk and backend
3453 *              make sure the parameter is correctly propogated
3454 *
3455 * PARAMETERS  :
3456 *   @arr      : map between the two enums
3457 *   @len      : len of the map
3458 *   @hal_name : name of the hal_parm to map
3459 *
3460 * RETURN     : int type of status
3461 *              fwk_name  -- success
3462 *              none-zero failure code
3463 *==========================================================================*/
3464template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3465        size_t len, halType hal_name)
3466{
3467
3468    for (size_t i = 0; i < len; i++) {
3469        if (arr[i].hal_name == hal_name) {
3470            return arr[i].fwk_name;
3471        }
3472    }
3473
3474    /* Not able to find matching framework type is not necessarily
3475     * an error case. This happens when mm-camera supports more attributes
3476     * than the frameworks do */
3477    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3478    return NAME_NOT_FOUND;
3479}
3480
3481/*===========================================================================
3482 * FUNCTION   : lookupHalName
3483 *
3484 * DESCRIPTION: In case the enum is not same in fwk and backend
3485 *              make sure the parameter is correctly propogated
3486 *
3487 * PARAMETERS  :
3488 *   @arr      : map between the two enums
3489 *   @len      : len of the map
3490 *   @fwk_name : name of the hal_parm to map
3491 *
3492 * RETURN     : int32_t type of status
3493 *              hal_name  -- success
3494 *              none-zero failure code
3495 *==========================================================================*/
3496template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3497        size_t len, fwkType fwk_name)
3498{
3499    for (size_t i = 0; i < len; i++) {
3500        if (arr[i].fwk_name == fwk_name) {
3501            return arr[i].hal_name;
3502        }
3503    }
3504
3505    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3506    return NAME_NOT_FOUND;
3507}
3508
3509/*===========================================================================
3510 * FUNCTION   : lookupProp
3511 *
3512 * DESCRIPTION: lookup a value by its name
3513 *
3514 * PARAMETERS :
3515 *   @arr     : map between the two enums
3516 *   @len     : size of the map
3517 *   @name    : name to be looked up
3518 *
3519 * RETURN     : Value if found
3520 *              CAM_CDS_MODE_MAX if not found
3521 *==========================================================================*/
3522template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
3523        size_t len, const char *name)
3524{
3525    if (name) {
3526        for (size_t i = 0; i < len; i++) {
3527            if (!strcmp(arr[i].desc, name)) {
3528                return arr[i].val;
3529            }
3530        }
3531    }
3532    return CAM_CDS_MODE_MAX;
3533}
3534
3535/*===========================================================================
3536 * FUNCTION   : clearInputBuffer
3537 *
3538 * DESCRIPTION: free the input buffer
3539 *
3540 * PARAMETERS :
3541 *   @input_buffer : ptr to input buffer data to be freed
3542 *
3543 * RETURN     : NONE
3544 *==========================================================================*/
3545void QCamera3HardwareInterface::clearInputBuffer(camera3_stream_buffer_t *input_buffer)
3546{
3547    if (input_buffer) {
3548        free(input_buffer);
3549        input_buffer = NULL;
3550    }
3551}
3552
3553/*===========================================================================
3554 *
3555 * DESCRIPTION:
3556 *
3557 * PARAMETERS :
3558 *   @metadata : metadata information from callback
3559 *   @timestamp: metadata buffer timestamp
3560 *   @request_id: request id
3561 *   @jpegMetadata: additional jpeg metadata
3562 *
3563 * RETURN     : camera_metadata_t*
3564 *              metadata in a format specified by fwk
3565 *==========================================================================*/
3566camera_metadata_t*
3567QCamera3HardwareInterface::translateFromHalMetadata(
3568                                 metadata_buffer_t *metadata,
3569                                 nsecs_t timestamp,
3570                                 int32_t request_id,
3571                                 const CameraMetadata& jpegMetadata,
3572                                 uint8_t pipeline_depth,
3573                                 uint8_t capture_intent)
3574{
3575    CameraMetadata camMetadata;
3576    camera_metadata_t *resultMetadata;
3577
3578    if (jpegMetadata.entryCount())
3579        camMetadata.append(jpegMetadata);
3580
3581    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
3582    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
3583    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
3584    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
3585
3586    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
3587        int64_t fwk_frame_number = *frame_number;
3588        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
3589    }
3590
3591    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
3592        int32_t fps_range[2];
3593        fps_range[0] = (int32_t)float_range->min_fps;
3594        fps_range[1] = (int32_t)float_range->max_fps;
3595        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3596                                      fps_range, 2);
3597        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
3598            __func__, fps_range[0], fps_range[1]);
3599    }
3600
3601    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
3602        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
3603    }
3604
3605    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
3606        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
3607                METADATA_MAP_SIZE(SCENE_MODES_MAP),
3608                *sceneMode);
3609        if (NAME_NOT_FOUND != val) {
3610            uint8_t fwkSceneMode = (uint8_t)val;
3611            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
3612            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
3613                    __func__, fwkSceneMode);
3614        }
3615    }
3616
3617    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
3618        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
3619        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
3620    }
3621
3622    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
3623        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
3624        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
3625    }
3626
3627    IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
3628            CAM_INTF_META_FACE_DETECTION, metadata) {
3629        uint8_t numFaces = MIN(faceDetectionInfo->num_faces_detected, MAX_ROI);
3630        int32_t faceIds[MAX_ROI];
3631        uint8_t faceScores[MAX_ROI];
3632        int32_t faceRectangles[MAX_ROI * 4];
3633        int32_t faceLandmarks[MAX_ROI * 6];
3634        size_t j = 0, k = 0;
3635        for (size_t i = 0; i < numFaces; i++) {
3636            faceIds[i] = faceDetectionInfo->faces[i].face_id;
3637            faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
3638            convertToRegions(faceDetectionInfo->faces[i].face_boundary,
3639                faceRectangles+j, -1);
3640            convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
3641            j+= 4;
3642            k+= 6;
3643        }
3644        if (numFaces <= 0) {
3645            memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
3646            memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
3647            memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
3648            memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
3649        }
3650        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
3651        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
3652        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, faceRectangles, numFaces * 4U);
3653        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, faceLandmarks, numFaces * 6U);
3654    }
3655
3656    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
3657        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
3658        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
3659    }
3660
3661    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
3662            CAM_INTF_META_EDGE_MODE, metadata) {
3663        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
3664        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
3665        camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
3666    }
3667
3668    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
3669        uint8_t fwk_flashPower = (uint8_t) *flashPower;
3670        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
3671    }
3672
3673    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
3674        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
3675    }
3676
3677    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
3678        if (0 <= *flashState) {
3679            uint8_t fwk_flashState = (uint8_t) *flashState;
3680            if (!gCamCapability[mCameraId]->flash_available) {
3681                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
3682            }
3683            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
3684        }
3685    }
3686
3687    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
3688        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
3689        if (NAME_NOT_FOUND != val) {
3690            uint8_t fwk_flashMode = (uint8_t)val;
3691            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
3692        }
3693    }
3694
3695    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
3696        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
3697        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
3698    }
3699
3700    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
3701        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
3702    }
3703
3704    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
3705        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
3706    }
3707
3708    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
3709        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
3710    }
3711
3712    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
3713        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
3714        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
3715    }
3716
3717    /*EIS is currently not hooked up to the app, so set the mode to OFF*/
3718    uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3719    camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
3720
3721    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
3722        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
3723        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
3724    }
3725
3726    IF_META_AVAILABLE(uint32_t, noiseRedStrength, CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata) {
3727        uint8_t fwk_noiseRedStrength = (uint8_t) *noiseRedStrength;
3728        camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, &fwk_noiseRedStrength, 1);
3729    }
3730
3731    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
3732        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
3733    }
3734
3735    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
3736            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
3737        int32_t scalerCropRegion[4];
3738        scalerCropRegion[0] = hScalerCropRegion->left;
3739        scalerCropRegion[1] = hScalerCropRegion->top;
3740        scalerCropRegion[2] = hScalerCropRegion->width;
3741        scalerCropRegion[3] = hScalerCropRegion->height;
3742
3743        // Adjust crop region from sensor output coordinate system to active
3744        // array coordinate system.
3745        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
3746                scalerCropRegion[2], scalerCropRegion[3]);
3747
3748        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
3749    }
3750
3751    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
3752        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
3753        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
3754    }
3755
3756    IF_META_AVAILABLE(int64_t, sensorFameDuration,
3757            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
3758        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
3759        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
3760    }
3761
3762    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
3763            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
3764        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
3765        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
3766                sensorRollingShutterSkew, 1);
3767    }
3768
3769    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
3770        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
3771        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
3772
3773        //calculate the noise profile based on sensitivity
3774        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
3775        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
3776        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
3777        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
3778            noise_profile[i]   = noise_profile_S;
3779            noise_profile[i+1] = noise_profile_O;
3780        }
3781        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
3782                noise_profile_S, noise_profile_O);
3783        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
3784                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
3785    }
3786
3787    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
3788        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
3789        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
3790    }
3791
3792    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
3793        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
3794                *faceDetectMode);
3795        if (NAME_NOT_FOUND != val) {
3796            uint8_t fwk_faceDetectMode = (uint8_t)val;
3797            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
3798        }
3799    }
3800
3801    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
3802        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
3803        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
3804    }
3805
3806    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
3807            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
3808        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
3809        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
3810    }
3811
3812    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
3813            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
3814        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
3815                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
3816    }
3817
3818    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
3819            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
3820        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
3821                CAM_MAX_SHADING_MAP_HEIGHT);
3822        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
3823                CAM_MAX_SHADING_MAP_WIDTH);
3824        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
3825                lensShadingMap->lens_shading, 4U * map_width * map_height);
3826    }
3827
3828    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
3829        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
3830        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
3831    }
3832
3833    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
3834        //Populate CAM_INTF_META_TONEMAP_CURVES
3835        /* ch0 = G, ch 1 = B, ch 2 = R*/
3836        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
3837            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
3838                    __func__, tonemap->tonemap_points_cnt,
3839                    CAM_MAX_TONEMAP_CURVE_SIZE);
3840            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
3841        }
3842
3843        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
3844                        &tonemap->curves[0].tonemap_points[0][0],
3845                        tonemap->tonemap_points_cnt * 2);
3846
3847        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
3848                        &tonemap->curves[1].tonemap_points[0][0],
3849                        tonemap->tonemap_points_cnt * 2);
3850
3851        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
3852                        &tonemap->curves[2].tonemap_points[0][0],
3853                        tonemap->tonemap_points_cnt * 2);
3854    }
3855
3856    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
3857            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
3858        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
3859                CC_GAINS_COUNT);
3860    }
3861
3862    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
3863            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
3864        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
3865                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
3866                CC_MATRIX_COLS * CC_MATRIX_ROWS);
3867    }
3868
3869    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
3870            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
3871        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
3872            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
3873                    __func__, toneCurve->tonemap_points_cnt,
3874                    CAM_MAX_TONEMAP_CURVE_SIZE);
3875            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
3876        }
3877        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
3878                (float*)toneCurve->curve.tonemap_points,
3879                toneCurve->tonemap_points_cnt * 2);
3880    }
3881
3882    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
3883            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
3884        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
3885                predColorCorrectionGains->gains, 4);
3886    }
3887
3888    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
3889            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
3890        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
3891                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
3892                CC_MATRIX_ROWS * CC_MATRIX_COLS);
3893    }
3894
3895    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
3896        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
3897    }
3898
3899    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
3900        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
3901        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
3902    }
3903
3904    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
3905        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
3906        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
3907    }
3908
3909    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
3910        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
3911                *effectMode);
3912        if (NAME_NOT_FOUND != val) {
3913            uint8_t fwk_effectMode = (uint8_t)val;
3914            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
3915        }
3916    }
3917
3918    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
3919            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
3920        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
3921                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
3922        if (NAME_NOT_FOUND != fwk_testPatternMode) {
3923            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
3924        }
3925        int32_t fwk_testPatternData[4];
3926        fwk_testPatternData[0] = testPatternData->r;
3927        fwk_testPatternData[3] = testPatternData->b;
3928        switch (gCamCapability[mCameraId]->color_arrangement) {
3929        case CAM_FILTER_ARRANGEMENT_RGGB:
3930        case CAM_FILTER_ARRANGEMENT_GRBG:
3931            fwk_testPatternData[1] = testPatternData->gr;
3932            fwk_testPatternData[2] = testPatternData->gb;
3933            break;
3934        case CAM_FILTER_ARRANGEMENT_GBRG:
3935        case CAM_FILTER_ARRANGEMENT_BGGR:
3936            fwk_testPatternData[2] = testPatternData->gr;
3937            fwk_testPatternData[1] = testPatternData->gb;
3938            break;
3939        default:
3940            ALOGE("%s: color arrangement %d is not supported", __func__,
3941                gCamCapability[mCameraId]->color_arrangement);
3942            break;
3943        }
3944        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
3945    }
3946
3947    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
3948        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
3949    }
3950
3951    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
3952        String8 str((const char *)gps_methods);
3953        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
3954    }
3955
3956    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
3957        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
3958    }
3959
3960    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
3961        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
3962    }
3963
3964    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
3965        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
3966        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
3967    }
3968
3969    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
3970        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
3971        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
3972    }
3973
3974    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
3975        int32_t fwk_thumb_size[2];
3976        fwk_thumb_size[0] = thumb_size->width;
3977        fwk_thumb_size[1] = thumb_size->height;
3978        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
3979    }
3980
3981    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
3982        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
3983                privateData,
3984                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
3985    }
3986
3987    if (metadata->is_tuning_params_valid) {
3988        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
3989        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
3990        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
3991
3992
3993        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
3994                sizeof(uint32_t));
3995        data += sizeof(uint32_t);
3996
3997        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
3998                sizeof(uint32_t));
3999        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4000        data += sizeof(uint32_t);
4001
4002        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4003                sizeof(uint32_t));
4004        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4005        data += sizeof(uint32_t);
4006
4007        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4008                sizeof(uint32_t));
4009        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4010        data += sizeof(uint32_t);
4011
4012        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4013                sizeof(uint32_t));
4014        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4015        data += sizeof(uint32_t);
4016
4017        metadata->tuning_params.tuning_mod3_data_size = 0;
4018        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4019                sizeof(uint32_t));
4020        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4021        data += sizeof(uint32_t);
4022
4023        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4024                TUNING_SENSOR_DATA_MAX);
4025        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4026                count);
4027        data += count;
4028
4029        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4030                TUNING_VFE_DATA_MAX);
4031        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4032                count);
4033        data += count;
4034
4035        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4036                TUNING_CPP_DATA_MAX);
4037        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4038                count);
4039        data += count;
4040
4041        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4042                TUNING_CAC_DATA_MAX);
4043        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4044                count);
4045        data += count;
4046
4047        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4048                (int32_t *)(void *)tuning_meta_data_blob,
4049                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4050    }
4051
4052    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4053            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4054        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4055                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4056                NEUTRAL_COL_POINTS);
4057    }
4058
4059    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4060        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4061        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4062    }
4063
4064    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4065        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4066        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4067        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4068                REGIONS_TUPLE_COUNT);
4069        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4070                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4071                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4072                hAeRegions->rect.height);
4073    }
4074
4075    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4076        /*af regions*/
4077        int32_t afRegions[REGIONS_TUPLE_COUNT];
4078        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4079        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4080                REGIONS_TUPLE_COUNT);
4081        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4082                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4083                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4084                hAfRegions->rect.height);
4085    }
4086
4087    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4088        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4089                *hal_ab_mode);
4090        if (NAME_NOT_FOUND != val) {
4091            uint8_t fwk_ab_mode = (uint8_t)val;
4092            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4093        }
4094    }
4095
4096    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4097        int val = lookupFwkName(SCENE_MODES_MAP,
4098                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4099        if (NAME_NOT_FOUND != val) {
4100            uint8_t fwkBestshotMode = (uint8_t)val;
4101            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4102            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4103        } else {
4104            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4105        }
4106    }
4107
4108    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4109         uint8_t fwk_mode = (uint8_t) *mode;
4110         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4111    }
4112
4113    /* Constant metadata values to be update*/
4114    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4115    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4116
4117    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4118    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4119
4120    int32_t hotPixelMap[2];
4121    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4122
4123    // CDS
4124    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4125        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4126    }
4127
4128    // Reprocess crop data
4129    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4130        uint8_t cnt = crop_data->num_of_streams;
4131        if ((0 < cnt) && (cnt < MAX_NUM_STREAMS)) {
4132            int rc = NO_ERROR;
4133            int32_t *crop = new int32_t[cnt*4];
4134            if (NULL == crop) {
4135                rc = NO_MEMORY;
4136            }
4137
4138            int32_t *crop_stream_ids = new int32_t[cnt];
4139            if (NULL == crop_stream_ids) {
4140                rc = NO_MEMORY;
4141            }
4142
4143            Vector<int32_t> roi_map;
4144
4145            if (NO_ERROR == rc) {
4146                int32_t steams_found = 0;
4147                for (size_t i = 0; i < cnt; i++) {
4148                    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4149                        it != mStreamInfo.end(); it++) {
4150                        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4151                        if (NULL != channel) {
4152                            if (crop_data->crop_info[i].stream_id ==
4153                                    channel->mStreams[0]->getMyServerID()) {
4154                                crop[steams_found*4] = crop_data->crop_info[i].crop.left;
4155                                crop[steams_found*4 + 1] = crop_data->crop_info[i].crop.top;
4156                                crop[steams_found*4 + 2] = crop_data->crop_info[i].crop.width;
4157                                crop[steams_found*4 + 3] = crop_data->crop_info[i].crop.height;
4158                                // In a more general case we may want to generate
4159                                // unique id depending on width, height, stream, private
4160                                // data etc.
4161#ifdef __LP64__
4162                                // Using XORed value of lower and upper halves as ID
4163                                crop_stream_ids[steams_found] = (int32_t)
4164                                        ((((int64_t)(*it)->stream) & 0x0000FFFF) ^
4165                                                (((int64_t)(*it)->stream) >> 0x20 & 0x0000FFFF));
4166#else
4167                                // FIXME: Although using data address as ID doesn't guarantee
4168                                // that all IDs will be unique, we are keeping existing nostrum
4169                                // for now till found better solution.
4170                                crop_stream_ids[steams_found] = (int32_t)(*it)->stream;
4171#endif
4172                                steams_found++;
4173                                roi_map.add(crop_data->crop_info[i].roi_map.left);
4174                                roi_map.add(crop_data->crop_info[i].roi_map.top);
4175                                roi_map.add(crop_data->crop_info[i].roi_map.width);
4176                                roi_map.add(crop_data->crop_info[i].roi_map.height);
4177                                CDBG("%s: Adding reprocess crop data for stream %p %dx%d, %dx%d",
4178                                        __func__,
4179                                        (*it)->stream,
4180                                        crop_data->crop_info[i].crop.left,
4181                                        crop_data->crop_info[i].crop.top,
4182                                        crop_data->crop_info[i].crop.width,
4183                                        crop_data->crop_info[i].crop.height);
4184                                CDBG("%s: Adding reprocess crop roi map for stream %p %dx%d, %dx%d",
4185                                        __func__,
4186                                        (*it)->stream,
4187                                        crop_data->crop_info[i].roi_map.left,
4188                                        crop_data->crop_info[i].roi_map.top,
4189                                        crop_data->crop_info[i].roi_map.width,
4190                                        crop_data->crop_info[i].roi_map.height);
4191                                break;
4192                            }
4193                        }
4194                    }
4195                }
4196
4197                camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4198                        &steams_found, 1);
4199                camMetadata.update(QCAMERA3_CROP_REPROCESS,
4200                        crop, (size_t)(steams_found * 4));
4201                camMetadata.update(QCAMERA3_CROP_STREAM_ID_REPROCESS,
4202                        crop_stream_ids, (size_t)steams_found);
4203                if (roi_map.array()) {
4204                    camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4205                            roi_map.array(), roi_map.size());
4206                }
4207            }
4208
4209            if (crop) {
4210                delete [] crop;
4211            }
4212            if (crop_stream_ids) {
4213                delete [] crop_stream_ids;
4214            }
4215        } else {
4216            // mm-qcamera-daemon only posts crop_data for streams
4217            // not linked to pproc. So no valid crop metadata is not
4218            // necessarily an error case.
4219            CDBG("%s: No valid crop metadata entries", __func__);
4220        }
4221    }
4222
4223    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4224        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4225                *cacMode);
4226        if (NAME_NOT_FOUND != val) {
4227            uint8_t fwkCacMode = (uint8_t)val;
4228            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4229        } else {
4230            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4231        }
4232    }
4233
4234    resultMetadata = camMetadata.release();
4235    return resultMetadata;
4236}
4237
4238/*===========================================================================
4239 * FUNCTION   : saveExifParams
4240 *
4241 * DESCRIPTION:
4242 *
4243 * PARAMETERS :
4244 *   @metadata : metadata information from callback
4245 *
4246 * RETURN     : none
4247 *
4248 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Cache the 3A/stats debug blobs from the HAL metadata into mExifParams
    // so they can later be embedded in JPEG EXIF debug data (see
    // get3AExifParams). Each section is copied only if present in the
    // incoming buffer, and its matching *_valid flag is raised; sections
    // absent from this callback keep their previously cached value.

    // Auto-exposure debug section.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        mExifParams.ae_debug_params = *ae_exif_debug_params;
        mExifParams.ae_debug_params_valid = TRUE;
    }
    // Auto-white-balance debug section.
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        mExifParams.awb_debug_params = *awb_exif_debug_params;
        mExifParams.awb_debug_params_valid = TRUE;
    }
    // Auto-focus debug section.
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        mExifParams.af_debug_params = *af_exif_debug_params;
        mExifParams.af_debug_params_valid = TRUE;
    }
    // Auto-scene-detection debug section.
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        mExifParams.asd_debug_params = *asd_exif_debug_params;
        mExifParams.asd_debug_params_valid = TRUE;
    }
    // 3A statistics buffer debug section.
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        mExifParams.stats_debug_params = *stats_exif_debug_params;
        mExifParams.stats_debug_params_valid = TRUE;
    }
}
4277
4278/*===========================================================================
4279 * FUNCTION   : get3AExifParams
4280 *
4281 * DESCRIPTION:
4282 *
4283 * PARAMETERS : none
4284 *
4285 *
4286 * RETURN     : mm_jpeg_exif_params_t
4287 *
4288 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns the cached 3A EXIF debug parameters (populated by
    // saveExifParams) by value, i.e. the caller gets a snapshot copy.
    return mExifParams;
}
4293
4294/*===========================================================================
4295 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4296 *
4297 * DESCRIPTION:
4298 *
4299 * PARAMETERS :
4300 *   @metadata : metadata information from callback
4301 *
4302 * RETURN     : camera_metadata_t*
4303 *              metadata in a format specified by fwk
4304 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Translates the subset of HAL metadata that the framework wants as
    // early as possible (3A states/triggers, lens state) into an
    // android.* partial result. Each tag is emitted only if its HAL
    // counterpart is present in the incoming buffer. Ownership of the
    // returned camera_metadata_t passes to the caller (release()).
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
        uint8_t fwk_afState = (uint8_t) *afState;
        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
    }

    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
    }

    // NOTE(review): count 2 assumes the HAL entry holds a {near, far} float
    // pair — verify against the vendor metadata definition.
    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
    }

    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    // AE precapture trigger and its id are reported together.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    // HAL focus mode -> framework AF mode via lookup table; unknown values
    // are logged and the tag is simply not emitted.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
                    val);
        }
    }

    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart: it is deduced
    // from redeye reduction, LED/flash mode and AEC mode, checked in that
    // priority order. Sentinels (*_MAX / -1) mean "not reported".
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        // Redeye reduction active implies ON_AUTO_FLASH_REDEYE regardless
        // of the other fields.
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields carried usable information; the tag is
        // not emitted for this result.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
        uint8_t fwk_lensState = *lensState;
        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
    }

    // Transfer ownership of the assembled metadata to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
4429
4430/*===========================================================================
4431 * FUNCTION   : dumpMetadataToFile
4432 *
4433 * DESCRIPTION: Dumps tuning metadata to file system
4434 *
4435 * PARAMETERS :
4436 *   @meta           : tuning metadata
4437 *   @dumpFrameCount : current dump frame count
4438 *   @enabled        : Enable mask
4439 *
4440 *==========================================================================*/
4441void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
4442                                                   uint32_t &dumpFrameCount,
4443                                                   bool enabled,
4444                                                   const char *type,
4445                                                   uint32_t frameNumber)
4446{
4447    uint32_t frm_num = 0;
4448
4449    //Some sanity checks
4450    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
4451        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
4452              __func__,
4453              meta.tuning_sensor_data_size,
4454              TUNING_SENSOR_DATA_MAX);
4455        return;
4456    }
4457
4458    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
4459        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
4460              __func__,
4461              meta.tuning_vfe_data_size,
4462              TUNING_VFE_DATA_MAX);
4463        return;
4464    }
4465
4466    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
4467        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
4468              __func__,
4469              meta.tuning_cpp_data_size,
4470              TUNING_CPP_DATA_MAX);
4471        return;
4472    }
4473
4474    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
4475        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
4476              __func__,
4477              meta.tuning_cac_data_size,
4478              TUNING_CAC_DATA_MAX);
4479        return;
4480    }
4481    //
4482
4483    if(enabled){
4484        char timeBuf[FILENAME_MAX];
4485        char buf[FILENAME_MAX];
4486        memset(buf, 0, sizeof(buf));
4487        memset(timeBuf, 0, sizeof(timeBuf));
4488        time_t current_time;
4489        struct tm * timeinfo;
4490        time (&current_time);
4491        timeinfo = localtime (&current_time);
4492        if (timeinfo != NULL) {
4493            strftime (timeBuf, sizeof(timeBuf),
4494                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
4495        }
4496        String8 filePath(timeBuf);
4497        snprintf(buf,
4498                sizeof(buf),
4499                "%dm_%s_%d.bin",
4500                dumpFrameCount,
4501                type,
4502                frameNumber);
4503        filePath.append(buf);
4504        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
4505        if (file_fd >= 0) {
4506            ssize_t written_len = 0;
4507            meta.tuning_data_version = TUNING_DATA_VERSION;
4508            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
4509            written_len += write(file_fd, data, sizeof(uint32_t));
4510            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
4511            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4512            written_len += write(file_fd, data, sizeof(uint32_t));
4513            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
4514            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4515            written_len += write(file_fd, data, sizeof(uint32_t));
4516            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
4517            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4518            written_len += write(file_fd, data, sizeof(uint32_t));
4519            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
4520            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4521            written_len += write(file_fd, data, sizeof(uint32_t));
4522            meta.tuning_mod3_data_size = 0;
4523            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
4524            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4525            written_len += write(file_fd, data, sizeof(uint32_t));
4526            size_t total_size = meta.tuning_sensor_data_size;
4527            data = (void *)((uint8_t *)&meta.data);
4528            written_len += write(file_fd, data, total_size);
4529            total_size = meta.tuning_vfe_data_size;
4530            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
4531            written_len += write(file_fd, data, total_size);
4532            total_size = meta.tuning_cpp_data_size;
4533            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
4534            written_len += write(file_fd, data, total_size);
4535            total_size = meta.tuning_cac_data_size;
4536            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
4537            written_len += write(file_fd, data, total_size);
4538            close(file_fd);
4539        }else {
4540            ALOGE("%s: fail to open file for metadata dumping", __func__);
4541        }
4542    }
4543}
4544
4545/*===========================================================================
4546 * FUNCTION   : cleanAndSortStreamInfo
4547 *
4548 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
4549 *              and sort them such that raw stream is at the end of the list
4550 *              This is a workaround for camera daemon constraint.
4551 *
4552 * PARAMETERS : None
4553 *
4554 *==========================================================================*/
4555void QCamera3HardwareInterface::cleanAndSortStreamInfo()
4556{
4557    List<stream_info_t *> newStreamInfo;
4558
4559    /*clean up invalid streams*/
4560    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
4561            it != mStreamInfo.end();) {
4562        if(((*it)->status) == INVALID){
4563            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
4564            delete channel;
4565            free(*it);
4566            it = mStreamInfo.erase(it);
4567        } else {
4568            it++;
4569        }
4570    }
4571
4572    // Move preview/video/callback/snapshot streams into newList
4573    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4574            it != mStreamInfo.end();) {
4575        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
4576                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
4577                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
4578            newStreamInfo.push_back(*it);
4579            it = mStreamInfo.erase(it);
4580        } else
4581            it++;
4582    }
4583    // Move raw streams into newList
4584    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4585            it != mStreamInfo.end();) {
4586        newStreamInfo.push_back(*it);
4587        it = mStreamInfo.erase(it);
4588    }
4589
4590    mStreamInfo = newStreamInfo;
4591}
4592
4593/*===========================================================================
4594 * FUNCTION   : extractJpegMetadata
4595 *
4596 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
4597 *              JPEG metadata is cached in HAL, and return as part of capture
4598 *              result when metadata is returned from camera daemon.
4599 *
4600 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
4601 *              @request:      capture request
4602 *
4603 *==========================================================================*/
4604void QCamera3HardwareInterface::extractJpegMetadata(
4605        CameraMetadata& jpegMetadata,
4606        const camera3_capture_request_t *request)
4607{
4608    CameraMetadata frame_settings;
4609    frame_settings = request->settings;
4610
4611    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
4612        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
4613                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
4614                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
4615
4616    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
4617        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
4618                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
4619                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
4620
4621    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
4622        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
4623                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
4624                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
4625
4626    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
4627        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
4628                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
4629                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
4630
4631    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
4632        jpegMetadata.update(ANDROID_JPEG_QUALITY,
4633                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
4634                frame_settings.find(ANDROID_JPEG_QUALITY).count);
4635
4636    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
4637        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
4638                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
4639                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
4640
4641    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
4642        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
4643                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
4644                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
4645}
4646
4647/*===========================================================================
4648 * FUNCTION   : convertToRegions
4649 *
4650 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
4651 *
4652 * PARAMETERS :
4653 *   @rect   : cam_rect_t struct to convert
4654 *   @region : int32_t destination array
4655 *   @weight : if we are converting from cam_area_t, weight is valid
4656 *             else weight = -1
4657 *
4658 *==========================================================================*/
4659void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
4660        int32_t *region, int weight)
4661{
4662    region[0] = rect.left;
4663    region[1] = rect.top;
4664    region[2] = rect.left + rect.width;
4665    region[3] = rect.top + rect.height;
4666    if (weight > -1) {
4667        region[4] = weight;
4668    }
4669}
4670
4671/*===========================================================================
4672 * FUNCTION   : convertFromRegions
4673 *
4674 * DESCRIPTION: helper method to convert from array to cam_rect_t
4675 *
4676 * PARAMETERS :
4677 *   @rect   : cam_rect_t struct to convert
4678 *   @region : int32_t destination array
4679 *   @weight : if we are converting from cam_area_t, weight is valid
4680 *             else weight = -1
4681 *
4682 *==========================================================================*/
4683void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
4684        const camera_metadata_t *settings, uint32_t tag)
4685{
4686    CameraMetadata frame_settings;
4687    frame_settings = settings;
4688    int32_t x_min = frame_settings.find(tag).data.i32[0];
4689    int32_t y_min = frame_settings.find(tag).data.i32[1];
4690    int32_t x_max = frame_settings.find(tag).data.i32[2];
4691    int32_t y_max = frame_settings.find(tag).data.i32[3];
4692    roi.weight = frame_settings.find(tag).data.i32[4];
4693    roi.rect.left = x_min;
4694    roi.rect.top = y_min;
4695    roi.rect.width = x_max - x_min;
4696    roi.rect.height = y_max - y_min;
4697}
4698
4699/*===========================================================================
4700 * FUNCTION   : resetIfNeededROI
4701 *
4702 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
4703 *              crop region
4704 *
4705 * PARAMETERS :
4706 *   @roi       : cam_area_t struct to resize
4707 *   @scalerCropRegion : cam_crop_region_t region to compare against
4708 *
4709 *
4710 *==========================================================================*/
4711bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
4712                                                 const cam_crop_region_t* scalerCropRegion)
4713{
4714    int32_t roi_x_max = roi->rect.width + roi->rect.left;
4715    int32_t roi_y_max = roi->rect.height + roi->rect.top;
4716    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
4717    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
4718
4719    /* According to spec weight = 0 is used to indicate roi needs to be disabled
4720     * without having this check the calculations below to validate if the roi
4721     * is inside scalar crop region will fail resulting in the roi not being
4722     * reset causing algorithm to continue to use stale roi window
4723     */
4724    if (roi->weight == 0) {
4725        return true;
4726    }
4727
4728    if ((roi_x_max < scalerCropRegion->left) ||
4729        // right edge of roi window is left of scalar crop's left edge
4730        (roi_y_max < scalerCropRegion->top)  ||
4731        // bottom edge of roi window is above scalar crop's top edge
4732        (roi->rect.left > crop_x_max) ||
4733        // left edge of roi window is beyond(right) of scalar crop's right edge
4734        (roi->rect.top > crop_y_max)){
4735        // top edge of roi windo is above scalar crop's top edge
4736        return false;
4737    }
4738    if (roi->rect.left < scalerCropRegion->left) {
4739        roi->rect.left = scalerCropRegion->left;
4740    }
4741    if (roi->rect.top < scalerCropRegion->top) {
4742        roi->rect.top = scalerCropRegion->top;
4743    }
4744    if (roi_x_max > crop_x_max) {
4745        roi_x_max = crop_x_max;
4746    }
4747    if (roi_y_max > crop_y_max) {
4748        roi_y_max = crop_y_max;
4749    }
4750    roi->rect.width = roi_x_max - roi->rect.left;
4751    roi->rect.height = roi_y_max - roi->rect.top;
4752    return true;
4753}
4754
4755/*===========================================================================
4756 * FUNCTION   : convertLandmarks
4757 *
4758 * DESCRIPTION: helper method to extract the landmarks from face detection info
4759 *
4760 * PARAMETERS :
4761 *   @face   : cam_rect_t struct to convert
4762 *   @landmarks : int32_t destination array
4763 *
4764 *
4765 *==========================================================================*/
4766void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
4767{
4768    landmarks[0] = (int32_t)face.left_eye_center.x;
4769    landmarks[1] = (int32_t)face.left_eye_center.y;
4770    landmarks[2] = (int32_t)face.right_eye_center.x;
4771    landmarks[3] = (int32_t)face.right_eye_center.y;
4772    landmarks[4] = (int32_t)face.mouth_center.x;
4773    landmarks[5] = (int32_t)face.mouth_center.y;
4774}
4775
4776#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
4777/*===========================================================================
4778 * FUNCTION   : initCapabilities
4779 *
4780 * DESCRIPTION: initialize camera capabilities in static data struct
4781 *
4782 * PARAMETERS :
4783 *   @cameraId  : camera Id
4784 *
4785 * RETURN     : int32_t type of status
4786 *              NO_ERROR  -- success
4787 *              none-zero failure code
4788 *==========================================================================*/
4789int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
4790{
4791    int rc = 0;
4792    mm_camera_vtbl_t *cameraHandle = NULL;
4793    QCamera3HeapMemory *capabilityHeap = NULL;
4794
4795    cameraHandle = camera_open((uint8_t)cameraId);
4796    if (!cameraHandle) {
4797        ALOGE("%s: camera_open failed", __func__);
4798        rc = -1;
4799        goto open_failed;
4800    }
4801
4802    capabilityHeap = new QCamera3HeapMemory();
4803    if (capabilityHeap == NULL) {
4804        ALOGE("%s: creation of capabilityHeap failed", __func__);
4805        goto heap_creation_failed;
4806    }
4807    /* Allocate memory for capability buffer */
4808    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
4809    if(rc != OK) {
4810        ALOGE("%s: No memory for cappability", __func__);
4811        goto allocate_failed;
4812    }
4813
4814    /* Map memory for capability buffer */
4815    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
4816    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
4817                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
4818                                capabilityHeap->getFd(0),
4819                                sizeof(cam_capability_t));
4820    if(rc < 0) {
4821        ALOGE("%s: failed to map capability buffer", __func__);
4822        goto map_failed;
4823    }
4824
4825    /* Query Capability */
4826    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
4827    if(rc < 0) {
4828        ALOGE("%s: failed to query capability",__func__);
4829        goto query_failed;
4830    }
4831    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
4832    if (!gCamCapability[cameraId]) {
4833        ALOGE("%s: out of memory", __func__);
4834        goto query_failed;
4835    }
4836    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
4837                                        sizeof(cam_capability_t));
4838    rc = 0;
4839
4840query_failed:
4841    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
4842                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
4843map_failed:
4844    capabilityHeap->deallocate();
4845allocate_failed:
4846    delete capabilityHeap;
4847heap_creation_failed:
4848    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
4849    cameraHandle = NULL;
4850open_failed:
4851    return rc;
4852}
4853
4854/*===========================================================================
4855 * FUNCTION   : initParameters
4856 *
4857 * DESCRIPTION: initialize camera parameters
4858 *
4859 * PARAMETERS :
4860 *
4861 * RETURN     : int32_t type of status
4862 *              NO_ERROR  -- success
4863 *              none-zero failure code
4864 *==========================================================================*/
4865int QCamera3HardwareInterface::initParameters()
4866{
4867    int rc = 0;
4868
4869    //Allocate Set Param Buffer
4870    mParamHeap = new QCamera3HeapMemory();
4871    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
4872    if(rc != OK) {
4873        rc = NO_MEMORY;
4874        ALOGE("Failed to allocate SETPARM Heap memory");
4875        delete mParamHeap;
4876        mParamHeap = NULL;
4877        return rc;
4878    }
4879
4880    //Map memory for parameters buffer
4881    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
4882            CAM_MAPPING_BUF_TYPE_PARM_BUF,
4883            mParamHeap->getFd(0),
4884            sizeof(metadata_buffer_t));
4885    if(rc < 0) {
4886        ALOGE("%s:failed to map SETPARM buffer",__func__);
4887        rc = FAILED_TRANSACTION;
4888        mParamHeap->deallocate();
4889        delete mParamHeap;
4890        mParamHeap = NULL;
4891        return rc;
4892    }
4893
4894    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
4895
4896    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
4897    return rc;
4898}
4899
4900/*===========================================================================
4901 * FUNCTION   : deinitParameters
4902 *
4903 * DESCRIPTION: de-initialize camera parameters
4904 *
4905 * PARAMETERS :
4906 *
4907 * RETURN     : NONE
4908 *==========================================================================*/
4909void QCamera3HardwareInterface::deinitParameters()
4910{
4911    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
4912            CAM_MAPPING_BUF_TYPE_PARM_BUF);
4913
4914    mParamHeap->deallocate();
4915    delete mParamHeap;
4916    mParamHeap = NULL;
4917
4918    mParameters = NULL;
4919
4920    free(mPrevParameters);
4921    mPrevParameters = NULL;
4922}
4923
4924/*===========================================================================
4925 * FUNCTION   : calcMaxJpegSize
4926 *
4927 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
4928 *
4929 * PARAMETERS :
4930 *
4931 * RETURN     : max_jpeg_size
4932 *==========================================================================*/
4933size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
4934{
4935    size_t max_jpeg_size = 0;
4936    size_t temp_width, temp_height;
4937    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
4938            MAX_SIZES_CNT);
4939    for (size_t i = 0; i < count; i++) {
4940        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
4941        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
4942        if (temp_width * temp_height > max_jpeg_size ) {
4943            max_jpeg_size = temp_width * temp_height;
4944        }
4945    }
4946    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
4947    return max_jpeg_size;
4948}
4949
4950/*===========================================================================
4951 * FUNCTION   : getMaxRawSize
4952 *
4953 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
4954 *
4955 * PARAMETERS :
4956 *
4957 * RETURN     : Largest supported Raw Dimension
4958 *==========================================================================*/
4959cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
4960{
4961    int max_width = 0;
4962    cam_dimension_t maxRawSize;
4963
4964    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
4965    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
4966        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
4967            max_width = gCamCapability[camera_id]->raw_dim[i].width;
4968            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
4969        }
4970    }
4971    return maxRawSize;
4972}
4973
4974
4975/*===========================================================================
4976 * FUNCTION   : calcMaxJpegDim
4977 *
4978 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
4979 *
4980 * PARAMETERS :
4981 *
4982 * RETURN     : max_jpeg_dim
4983 *==========================================================================*/
4984cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
4985{
4986    cam_dimension_t max_jpeg_dim;
4987    cam_dimension_t curr_jpeg_dim;
4988    max_jpeg_dim.width = 0;
4989    max_jpeg_dim.height = 0;
4990    curr_jpeg_dim.width = 0;
4991    curr_jpeg_dim.height = 0;
4992    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
4993        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
4994        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
4995        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
4996            max_jpeg_dim.width * max_jpeg_dim.height ) {
4997            max_jpeg_dim.width = curr_jpeg_dim.width;
4998            max_jpeg_dim.height = curr_jpeg_dim.height;
4999        }
5000    }
5001    return max_jpeg_dim;
5002}
5003
5004
5005/*===========================================================================
5006 * FUNCTION   : initStaticMetadata
5007 *
5008 * DESCRIPTION: initialize the static metadata
5009 *
5010 * PARAMETERS :
5011 *   @cameraId  : camera Id
5012 *
5013 * RETURN     : int32_t type of status
5014 *              0  -- success
5015 *              non-zero failure code
5016 *==========================================================================*/
5017int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5018{
5019    int rc = 0;
5020    CameraMetadata staticInfo;
5021    size_t count = 0;
5022    bool limitedDevice = false;
5023
5024    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5025     * guaranteed, its advertised as limited device */
5026    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5027            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5028
5029    uint8_t supportedHwLvl = limitedDevice ?
5030            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5031            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5032
5033    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5034            &supportedHwLvl, 1);
5035
5036    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5037    /*HAL 3 only*/
5038    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5039                    &gCamCapability[cameraId]->min_focus_distance, 1);
5040
5041    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5042                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5043
5044    /*should be using focal lengths but sensor doesn't provide that info now*/
5045    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5046                      &gCamCapability[cameraId]->focal_length,
5047                      1);
5048
5049    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5050                      gCamCapability[cameraId]->apertures,
5051                      gCamCapability[cameraId]->apertures_count);
5052
5053    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5054                gCamCapability[cameraId]->filter_densities,
5055                gCamCapability[cameraId]->filter_densities_count);
5056
5057
5058    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5059                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5060                      gCamCapability[cameraId]->optical_stab_modes_count);
5061
5062    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5063            gCamCapability[cameraId]->lens_shading_map_size.height};
5064    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5065                      lens_shading_map_size,
5066                      sizeof(lens_shading_map_size)/sizeof(int32_t));
5067
5068    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5069            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5070
5071    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5072            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5073
5074    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5075            &gCamCapability[cameraId]->max_frame_duration, 1);
5076
5077    camera_metadata_rational baseGainFactor = {
5078            gCamCapability[cameraId]->base_gain_factor.numerator,
5079            gCamCapability[cameraId]->base_gain_factor.denominator};
5080    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5081                      &baseGainFactor, 1);
5082
5083    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5084                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5085
5086    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5087            gCamCapability[cameraId]->pixel_array_size.height};
5088    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5089                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5090
5091    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5092                                                gCamCapability[cameraId]->active_array_size.top,
5093                                                gCamCapability[cameraId]->active_array_size.width,
5094                                                gCamCapability[cameraId]->active_array_size.height};
5095    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5096                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5097
5098    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5099            &gCamCapability[cameraId]->white_level, 1);
5100
5101    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5102            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5103
5104    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5105                      &gCamCapability[cameraId]->flash_charge_duration, 1);
5106
5107    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5108                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5109
5110    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
5111    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
5112                      (int32_t *)&maxFaces, 1);
5113
5114    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
5115    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5116            &timestampSource, 1);
5117
5118    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5119                      &gCamCapability[cameraId]->histogram_size, 1);
5120
5121    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5122            &gCamCapability[cameraId]->max_histogram_count, 1);
5123
5124    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5125            gCamCapability[cameraId]->sharpness_map_size.height};
5126
5127    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5128            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5129
5130    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5131            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5132
5133    int32_t scalar_formats[] = {
5134            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5135            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5136            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5137            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5138            HAL_PIXEL_FORMAT_RAW10,
5139            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5140    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5141    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5142                      scalar_formats,
5143                      scalar_formats_count);
5144
5145    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
5146    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5147    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
5148            count, MAX_SIZES_CNT, available_processed_sizes);
5149    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
5150            available_processed_sizes, count * 2);
5151
5152    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
5153    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
5154    makeTable(gCamCapability[cameraId]->raw_dim,
5155            count, MAX_SIZES_CNT, available_raw_sizes);
5156    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
5157            available_raw_sizes, count * 2);
5158
5159    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
5160    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
5161    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
5162            count, MAX_SIZES_CNT, available_fps_ranges);
5163    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5164            available_fps_ranges, count * 2);
5165
5166    camera_metadata_rational exposureCompensationStep = {
5167            gCamCapability[cameraId]->exp_compensation_step.numerator,
5168            gCamCapability[cameraId]->exp_compensation_step.denominator};
5169    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
5170                      &exposureCompensationStep, 1);
5171
5172    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
5173    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
5174                      availableVstabModes, sizeof(availableVstabModes));
5175
5176    /*HAL 1 and HAL 3 common*/
5177    float maxZoom = 4;
5178    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5179            &maxZoom, 1);
5180
5181    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
5182    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
5183
5184    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
5185    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
5186        max3aRegions[2] = 0; /* AF not supported */
5187    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
5188            max3aRegions, 3);
5189
5190    uint8_t availableFaceDetectModes[] = {
5191            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
5192            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
5193    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5194            availableFaceDetectModes,
5195            sizeof(availableFaceDetectModes)/sizeof(availableFaceDetectModes[0]));
5196
5197    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
5198                                           gCamCapability[cameraId]->exposure_compensation_max};
5199    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
5200            exposureCompensationRange,
5201            sizeof(exposureCompensationRange)/sizeof(int32_t));
5202
5203    uint8_t lensFacing = (facingBack) ?
5204            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
5205    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
5206
5207    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
5208                      available_thumbnail_sizes,
5209                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5210
5211    /*all sizes will be clubbed into this tag*/
5212    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
5213    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5214    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
5215            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
5216            gCamCapability[cameraId]->max_downscale_factor);
5217    /*android.scaler.availableStreamConfigurations*/
5218    size_t max_stream_configs_size = count * scalar_formats_count * 4;
5219    int32_t available_stream_configs[max_stream_configs_size];
5220    /* Add input/output stream configurations for each scalar formats*/
5221    size_t idx = 0;
5222    for (size_t j = 0; j < scalar_formats_count; j++) {
5223        switch (scalar_formats[j]) {
5224        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5225        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5226        case HAL_PIXEL_FORMAT_RAW10:
5227            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5228                available_stream_configs[idx] = scalar_formats[j];
5229                available_stream_configs[idx+1] =
5230                    gCamCapability[cameraId]->raw_dim[i].width;
5231                available_stream_configs[idx+2] =
5232                    gCamCapability[cameraId]->raw_dim[i].height;
5233                available_stream_configs[idx+3] =
5234                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
5235                idx+=4;
5236            }
5237            break;
5238        case HAL_PIXEL_FORMAT_BLOB:
5239            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
5240                available_stream_configs[idx] = scalar_formats[j];
5241                available_stream_configs[idx+1] = available_jpeg_sizes[i*2];
5242                available_stream_configs[idx+2] = available_jpeg_sizes[i*2+1];
5243                available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
5244                idx+=4;
5245            }
5246            break;
5247
5248        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
5249        case HAL_PIXEL_FORMAT_YCbCr_420_888:
5250        default:
5251            cam_dimension_t largest_picture_size;
5252            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
5253            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5254                available_stream_configs[idx] = scalar_formats[j];
5255                available_stream_configs[idx+1] =
5256                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5257                available_stream_configs[idx+2] =
5258                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5259                available_stream_configs[idx+3] =
5260                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
5261                idx+=4;
5262
5263                /* Book keep largest */
5264                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
5265                        >= largest_picture_size.width &&
5266                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
5267                        >= largest_picture_size.height)
5268                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
5269            }
5270
5271            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
5272            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
5273                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5274                available_stream_configs[idx] = scalar_formats[j];
5275                available_stream_configs[idx+1] = largest_picture_size.width;
5276                available_stream_configs[idx+2] = largest_picture_size.height;
5277                available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT;
5278                idx+=4;
5279            }
5280            break;
5281        }
5282    }
5283
5284    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
5285                      available_stream_configs, idx);
5286    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5287    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5288
5289    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5290    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5291
5292    /* android.scaler.availableMinFrameDurations */
5293    int64_t available_min_durations[max_stream_configs_size];
5294    idx = 0;
5295    for (size_t j = 0; j < scalar_formats_count; j++) {
5296        switch (scalar_formats[j]) {
5297        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5298        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5299        case HAL_PIXEL_FORMAT_RAW10:
5300            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5301                available_min_durations[idx] = scalar_formats[j];
5302                available_min_durations[idx+1] =
5303                    gCamCapability[cameraId]->raw_dim[i].width;
5304                available_min_durations[idx+2] =
5305                    gCamCapability[cameraId]->raw_dim[i].height;
5306                available_min_durations[idx+3] =
5307                    gCamCapability[cameraId]->raw_min_duration[i];
5308                idx+=4;
5309            }
5310            break;
5311        default:
5312            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5313                available_min_durations[idx] = scalar_formats[j];
5314                available_min_durations[idx+1] =
5315                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5316                available_min_durations[idx+2] =
5317                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5318                available_min_durations[idx+3] =
5319                    gCamCapability[cameraId]->picture_min_duration[i];
5320                idx+=4;
5321            }
5322            break;
5323        }
5324    }
5325    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
5326                      &available_min_durations[0], idx);
5327
5328    Vector<int32_t> available_hfr_configs;
5329    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
5330        int32_t fps = 0;
5331        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
5332        case CAM_HFR_MODE_60FPS:
5333            fps = 60;
5334            break;
5335        case CAM_HFR_MODE_90FPS:
5336            fps = 90;
5337            break;
5338        case CAM_HFR_MODE_120FPS:
5339            fps = 120;
5340            break;
5341        case CAM_HFR_MODE_150FPS:
5342            fps = 150;
5343            break;
5344        case CAM_HFR_MODE_180FPS:
5345            fps = 180;
5346            break;
5347        case CAM_HFR_MODE_210FPS:
5348            fps = 210;
5349            break;
5350        case CAM_HFR_MODE_240FPS:
5351            fps = 240;
5352            break;
5353        case CAM_HFR_MODE_480FPS:
5354            fps = 480;
5355            break;
5356        case CAM_HFR_MODE_OFF:
5357        case CAM_HFR_MODE_MAX:
5358        default:
5359            break;
5360        }
5361
5362        if (fps > 0) {
5363            /* For each HFR frame rate, need to advertise one variable fps range
5364             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
5365             * [120, 120]. While camcorder preview alone is running [30, 120] is
5366             * set by the app. When video recording is started, [120, 120] is
5367             * set. This way sensor configuration does not change when recording
5368             * is started */
5369
5370            /* (width, height, fps_min, fps_max, batch_size_max) */
5371            available_hfr_configs.add(
5372                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5373            available_hfr_configs.add(
5374                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5375            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
5376            available_hfr_configs.add(fps);
5377            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
5378
5379            /* (width, height, fps_min, fps_max, batch_size_max) */
5380            available_hfr_configs.add(
5381                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5382            available_hfr_configs.add(
5383                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5384            available_hfr_configs.add(fps);
5385            available_hfr_configs.add(fps);
5386            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
5387       }
5388    }
5389    //Advertise HFR capability only if the property is set
5390    char prop[PROPERTY_VALUE_MAX];
5391    memset(prop, 0, sizeof(prop));
5392    property_get("persist.camera.hal3hfr.enable", prop, "0");
5393    uint8_t hfrEnable = (uint8_t)atoi(prop);
5394
5395    if(hfrEnable && available_hfr_configs.array()) {
5396        staticInfo.update(
5397                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
5398                available_hfr_configs.array(), available_hfr_configs.size());
5399    }
5400
5401    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
5402    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
5403                      &max_jpeg_size, 1);
5404
5405    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
5406    size_t size = 0;
5407    count = CAM_EFFECT_MODE_MAX;
5408    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
5409    for (size_t i = 0; i < count; i++) {
5410        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5411                gCamCapability[cameraId]->supported_effects[i]);
5412        if (NAME_NOT_FOUND != val) {
5413            avail_effects[size] = (uint8_t)val;
5414            size++;
5415        }
5416    }
5417    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
5418                      avail_effects,
5419                      size);
5420
5421    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
5422    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
5423    size_t supported_scene_modes_cnt = 0;
5424    count = CAM_SCENE_MODE_MAX;
5425    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
5426    for (size_t i = 0; i < count; i++) {
5427        int val = lookupFwkName(SCENE_MODES_MAP, METADATA_MAP_SIZE(SCENE_MODES_MAP),
5428                gCamCapability[cameraId]->supported_scene_modes[i]);
5429        if (NAME_NOT_FOUND != val) {
5430            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
5431            supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
5432            supported_scene_modes_cnt++;
5433        }
5434    }
5435    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
5436                      avail_scene_modes,
5437                      supported_scene_modes_cnt);
5438
5439    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
5440    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
5441                      supported_scene_modes_cnt,
5442                      CAM_SCENE_MODE_MAX,
5443                      scene_mode_overrides,
5444                      supported_indexes,
5445                      cameraId);
5446
5447    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
5448            scene_mode_overrides, supported_scene_modes_cnt * 3);
5449
5450    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
5451    size = 0;
5452    count = CAM_ANTIBANDING_MODE_MAX;
5453    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
5454    for (size_t i = 0; i < count; i++) {
5455        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5456                gCamCapability[cameraId]->supported_antibandings[i]);
5457        if (NAME_NOT_FOUND != val) {
5458            avail_antibanding_modes[size] = (uint8_t)val;
5459            size++;
5460        }
5461
5462    }
5463    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
5464                      avail_antibanding_modes,
5465                      size);
5466
5467    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
5468    size = 0;
5469    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
5470    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
5471    if (0 == count) {
5472        avail_abberation_modes[0] =
5473                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5474        size++;
5475    } else {
5476        for (size_t i = 0; i < count; i++) {
5477            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5478                    gCamCapability[cameraId]->aberration_modes[i]);
5479            if (NAME_NOT_FOUND != val) {
5480                avail_abberation_modes[size] = (uint8_t)val;
5481                size++;
5482            } else {
5483                ALOGE("%s: Invalid CAC mode %d", __func__,
5484                        gCamCapability[cameraId]->aberration_modes[i]);
5485                break;
5486            }
5487        }
5488
5489    }
5490    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
5491            avail_abberation_modes,
5492            size);
5493
5494    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
5495    size = 0;
5496    count = CAM_FOCUS_MODE_MAX;
5497    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
5498    for (size_t i = 0; i < count; i++) {
5499        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
5500                gCamCapability[cameraId]->supported_focus_modes[i]);
5501        if (NAME_NOT_FOUND != val) {
5502            avail_af_modes[size] = (uint8_t)val;
5503            size++;
5504        }
5505    }
5506    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
5507                      avail_af_modes,
5508                      size);
5509
5510    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
5511    size = 0;
5512    count = CAM_WB_MODE_MAX;
5513    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
5514    for (size_t i = 0; i < count; i++) {
5515        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
5516                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
5517                gCamCapability[cameraId]->supported_white_balances[i]);
5518        if (NAME_NOT_FOUND != val) {
5519            avail_awb_modes[size] = (uint8_t)val;
5520            size++;
5521        }
5522    }
5523    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
5524                      avail_awb_modes,
5525                      size);
5526
5527    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
5528    count = CAM_FLASH_FIRING_LEVEL_MAX;
5529    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
5530            count);
5531    for (size_t i = 0; i < count; i++) {
5532        available_flash_levels[i] =
5533                gCamCapability[cameraId]->supported_firing_levels[i];
5534    }
5535    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
5536            available_flash_levels, count);
5537
5538    uint8_t flashAvailable;
5539    if (gCamCapability[cameraId]->flash_available)
5540        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
5541    else
5542        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
5543    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
5544            &flashAvailable, 1);
5545
5546    Vector<uint8_t> avail_ae_modes;
5547    count = CAM_AE_MODE_MAX;
5548    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
5549    for (size_t i = 0; i < count; i++) {
5550        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
5551    }
5552    if (flashAvailable) {
5553        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
5554        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
5555        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
5556    }
5557    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
5558                      avail_ae_modes.array(),
5559                      avail_ae_modes.size());
5560
5561    int32_t sensitivity_range[2];
5562    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
5563    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
5564    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
5565                      sensitivity_range,
5566                      sizeof(sensitivity_range) / sizeof(int32_t));
5567
5568    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
5569                      &gCamCapability[cameraId]->max_analog_sensitivity,
5570                      1);
5571
5572    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
5573    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
5574                      &sensor_orientation,
5575                      1);
5576
5577    int32_t max_output_streams[] = {
5578            MAX_STALLING_STREAMS,
5579            MAX_PROCESSED_STREAMS,
5580            MAX_RAW_STREAMS};
5581    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
5582            max_output_streams,
5583            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
5584
5585    uint8_t avail_leds = 0;
5586    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
5587                      &avail_leds, 0);
5588
5589    uint8_t focus_dist_calibrated;
5590    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
5591            gCamCapability[cameraId]->focus_dist_calibrated);
5592    if (NAME_NOT_FOUND != val) {
5593        focus_dist_calibrated = (uint8_t)val;
5594        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
5595                     &focus_dist_calibrated, 1);
5596    }
5597
5598    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
5599    size = 0;
5600    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
5601            MAX_TEST_PATTERN_CNT);
5602    for (size_t i = 0; i < count; i++) {
5603        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
5604                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
5605        if (NAME_NOT_FOUND != testpatternMode) {
5606            avail_testpattern_modes[size] = testpatternMode;
5607            size++;
5608        }
5609    }
5610    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
5611                      avail_testpattern_modes,
5612                      size);
5613
5614    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
5615    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
5616                      &max_pipeline_depth,
5617                      1);
5618
5619    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
5620    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
5621                      &partial_result_count,
5622                       1);
5623
5624    int32_t max_stall_duration = MAX_REPROCESS_STALL;
5625    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
5626
5627    Vector<uint8_t> available_capabilities;
5628    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
5629    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
5630    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
5631    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
5632    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
5633    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
5634    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
5635    if (hfrEnable) {
5636        available_capabilities.add(
5637                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
5638    }
5639
5640    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
5641        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
5642    }
5643    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
5644            available_capabilities.array(),
5645            available_capabilities.size());
5646
5647    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
5648    //BURST_CAPTURE.
5649    uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE;
5650    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1);
5651
5652    //awbLockAvailable to be set to true if capabilities has
5653    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
5654    uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE;
5655    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1);
5656
5657    int32_t max_input_streams = 1;
5658    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
5659                      &max_input_streams,
5660                      1);
5661
5662    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1, HAL_PIXEL_FORMAT_BLOB,
5663            HAL_PIXEL_FORMAT_YCbCr_420_888, 1,HAL_PIXEL_FORMAT_BLOB};
5664    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
5665                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
5666
5667    int32_t max_latency = (limitedDevice) ?
5668            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
5669    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
5670                      &max_latency,
5671                      1);
5672
5673    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
5674    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
5675            available_hot_pixel_modes,
5676            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
5677
5678    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
5679                                      ANDROID_EDGE_MODE_FAST};
5680    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
5681            available_edge_modes,
5682            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
5683
5684    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
5685                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
5686                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL};
5687    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
5688            available_noise_red_modes,
5689            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
5690
5691    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
5692                                         ANDROID_TONEMAP_MODE_FAST};
5693    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
5694            available_tonemap_modes,
5695            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
5696
5697    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
5698    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
5699            available_hot_pixel_map_modes,
5700            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
5701
5702    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
5703            gCamCapability[cameraId]->reference_illuminant1);
5704    if (NAME_NOT_FOUND != val) {
5705        uint8_t fwkReferenceIlluminant = (uint8_t)val;
5706        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
5707    }
5708
5709    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
5710            gCamCapability[cameraId]->reference_illuminant2);
5711    if (NAME_NOT_FOUND != val) {
5712        uint8_t fwkReferenceIlluminant = (uint8_t)val;
5713        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
5714    }
5715
5716    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
5717            (void *)gCamCapability[cameraId]->forward_matrix1,
5718            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
5719
5720    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
5721            (void *)gCamCapability[cameraId]->forward_matrix2,
5722            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
5723
5724    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
5725            (void *)gCamCapability[cameraId]->color_transform1,
5726            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
5727
5728    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
5729            (void *)gCamCapability[cameraId]->color_transform2,
5730            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
5731
5732    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
5733            (void *)gCamCapability[cameraId]->calibration_transform1,
5734            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
5735
5736    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
5737            (void *)gCamCapability[cameraId]->calibration_transform2,
5738            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
5739
5740    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
5741       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
5742       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
5743       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
5744       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
5745       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5746       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
5747       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
5748       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
5749       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
5750       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
5751       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
5752       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
5753       ANDROID_JPEG_GPS_COORDINATES,
5754       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
5755       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
5756       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
5757       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
5758       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
5759       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
5760       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
5761       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
5762       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
5763       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
5764       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
5765       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
5766       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
5767       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
5768       ANDROID_BLACK_LEVEL_LOCK };
5769
5770    size_t request_keys_cnt =
5771            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
5772    Vector<int32_t> available_request_keys;
5773    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
5774    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
5775        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
5776    }
5777    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
5778            available_request_keys.array(), available_request_keys.size());
5779
5780    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
5781       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
5782       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
5783       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
5784       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
5785       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
5786       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
5787       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
5788       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
5789       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
5790       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
5791       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
5792       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
5793       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
5794       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5795       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
5796       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
5797       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
5798       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
5799       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5800       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
5801       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
5802       ANDROID_STATISTICS_FACE_SCORES};
5803    size_t result_keys_cnt =
5804            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
5805
5806    Vector<int32_t> available_result_keys;
5807    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
5808    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
5809        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
5810    }
5811    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
5812       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
5813       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
5814    }
5815    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
5816            available_result_keys.array(), available_result_keys.size());
5817
5818    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
5819       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5820       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
5821       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
5822       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
5823       ANDROID_SCALER_CROPPING_TYPE,
5824       ANDROID_SYNC_MAX_LATENCY,
5825       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5826       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
5827       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
5828       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
5829       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
5830       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
5831       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5832       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5833       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5834       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5835       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5836       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
5837       ANDROID_LENS_FACING,
5838       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
5839       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
5840       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
5841       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
5842       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5843       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
5844       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
5845       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
5846       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
5847       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
5848       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
5849       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
5850       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5851       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5852       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5853       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5854       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
5855       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
5856       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
5857       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5858       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5859       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5860       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5861       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
5862       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
5863       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
5864       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
5865       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
5866       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
5867    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
5868                      available_characteristics_keys,
5869                      sizeof(available_characteristics_keys)/sizeof(int32_t));
5870
5871    /*available stall durations depend on the hw + sw and will be different for different devices */
5872    /*have to add for raw after implementation*/
5873    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
5874    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
5875
5876    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5877    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
5878            MAX_SIZES_CNT);
5879    size_t available_stall_size = count * 4;
5880    int64_t available_stall_durations[available_stall_size];
5881    idx = 0;
5882    for (uint32_t j = 0; j < stall_formats_count; j++) {
5883       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
5884          for (uint32_t i = 0; i < count; i++) {
5885             available_stall_durations[idx]   = stall_formats[j];
5886             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5887             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5888             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
5889             idx+=4;
5890          }
5891       } else {
5892          for (uint32_t i = 0; i < raw_count; i++) {
5893             available_stall_durations[idx]   = stall_formats[j];
5894             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
5895             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
5896             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
5897             idx+=4;
5898          }
5899       }
5900    }
5901    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
5902                      available_stall_durations,
5903                      idx);
5904    //QCAMERA3_OPAQUE_RAW
5905    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
5906    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
5907    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
5908    case LEGACY_RAW:
5909        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
5910            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
5911        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
5912            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
5913        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
5914            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
5915        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
5916        break;
5917    case MIPI_RAW:
5918        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
5919            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
5920        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
5921            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
5922        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
5923            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
5924        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
5925        break;
5926    default:
5927        ALOGE("%s: unknown opaque_raw_format %d", __func__,
5928                gCamCapability[cameraId]->opaque_raw_fmt);
5929        break;
5930    }
5931    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
5932
5933    int32_t strides[3*raw_count];
5934    for (size_t i = 0; i < raw_count; i++) {
5935        cam_stream_buf_plane_info_t buf_planes;
5936        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
5937        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
5938        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
5939            &gCamCapability[cameraId]->padding_info, &buf_planes);
5940        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
5941    }
5942    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
5943            3*raw_count);
5944
5945    gStaticMetadata[cameraId] = staticInfo.release();
5946    return rc;
5947}
5948
5949/*===========================================================================
5950 * FUNCTION   : makeTable
5951 *
5952 * DESCRIPTION: make a table of sizes
5953 *
5954 * PARAMETERS :
5955 *
5956 *
5957 *==========================================================================*/
5958void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
5959        size_t max_size, int32_t *sizeTable)
5960{
5961    size_t j = 0;
5962    if (size > max_size) {
5963       size = max_size;
5964    }
5965    for (size_t i = 0; i < size; i++) {
5966        sizeTable[j] = dimTable[i].width;
5967        sizeTable[j+1] = dimTable[i].height;
5968        j+=2;
5969    }
5970}
5971
5972/*===========================================================================
5973 * FUNCTION   : makeFPSTable
5974 *
5975 * DESCRIPTION: make a table of fps ranges
5976 *
5977 * PARAMETERS :
5978 *
5979 *==========================================================================*/
5980void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
5981        size_t max_size, int32_t *fpsRangesTable)
5982{
5983    size_t j = 0;
5984    if (size > max_size) {
5985       size = max_size;
5986    }
5987    for (size_t i = 0; i < size; i++) {
5988        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
5989        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
5990        j+=2;
5991    }
5992}
5993
5994/*===========================================================================
5995 * FUNCTION   : makeOverridesList
5996 *
5997 * DESCRIPTION: make a list of scene mode overrides
5998 *
5999 * PARAMETERS :
6000 *
6001 *
6002 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout: 3 bytes per scene mode -> (AE mode, AWB mode, AF mode).
    size_t j = 0;
    if (size > max_size) {
       size = max_size;
    }
    // Only search as many focus modes as the sensor actually advertises.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th framework-visible scene mode to
        // its slot in the daemon-provided overridesTable.
        size_t index = supported_indexes[i];
        // AE override: advertise auto-flash only when a flash unit exists.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate HAL enum to framework enum.
        // NOTE(review): if the lookup fails, overridesList[j+1] is left
        // unwritten — presumably the caller pre-initializes the buffer;
        // TODO confirm against the call site in initStaticMetadata.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: report the daemon's focus mode only if the sensor
        // supports it; otherwise fall back to AF off.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
6047
6048/*===========================================================================
6049 * FUNCTION   : filterJpegSizes
6050 *
6051 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6052 *              could be downscaled to
6053 *
6054 * PARAMETERS :
6055 *
6056 * RETURN     : length of jpegSizes array
6057 *==========================================================================*/
6058
6059size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6060        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6061        uint8_t downscale_factor)
6062{
6063    if (0 == downscale_factor) {
6064        downscale_factor = 1;
6065    }
6066
6067    int32_t min_width = active_array_size.width / downscale_factor;
6068    int32_t min_height = active_array_size.height / downscale_factor;
6069    size_t jpegSizesCnt = 0;
6070    if (processedSizesCnt > maxCount) {
6071        processedSizesCnt = maxCount;
6072    }
6073    for (size_t i = 0; i < processedSizesCnt; i+=2) {
6074        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6075            jpegSizes[jpegSizesCnt] = processedSizes[i];
6076            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6077            jpegSizesCnt += 2;
6078        }
6079    }
6080    return jpegSizesCnt;
6081}
6082
6083/*===========================================================================
 * FUNCTION   : getScalarFormat
6085 *
6086 * DESCRIPTION: convert the format to type recognized by framework
6087 *
6088 * PARAMETERS : format : the format from backend
6089 *
6090 ** RETURN    : format recognized by framework
6091 *
6092 *==========================================================================*/
6093int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6094{
6095    int32_t halPixelFormat;
6096
6097    switch (format) {
6098    case CAM_FORMAT_YUV_420_NV12:
6099        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6100        break;
6101    case CAM_FORMAT_YUV_420_NV21:
6102        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6103        break;
6104    case CAM_FORMAT_YUV_420_NV21_ADRENO:
6105        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6106        break;
6107    case CAM_FORMAT_YUV_420_YV12:
6108        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6109        break;
6110    case CAM_FORMAT_YUV_422_NV16:
6111    case CAM_FORMAT_YUV_422_NV61:
6112    default:
6113        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6114        break;
6115    }
6116    return halPixelFormat;
6117}
6118
6119/*===========================================================================
6120 * FUNCTION   : computeNoiseModelEntryS
6121 *
6122 * DESCRIPTION: function to map a given sensitivity to the S noise
6123 *              model parameters in the DNG noise model.
6124 *
6125 * PARAMETERS : sens : the sensor sensitivity
6126 *
6127 ** RETURN    : S (sensor amplification) noise
6128 *
6129 *==========================================================================*/
6130double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6131    double s = gCamCapability[mCameraId]->gradient_S * sens +
6132            gCamCapability[mCameraId]->offset_S;
6133    return ((s < 0.0) ? 0.0 : s);
6134}
6135
6136/*===========================================================================
6137 * FUNCTION   : computeNoiseModelEntryO
6138 *
6139 * DESCRIPTION: function to map a given sensitivity to the O noise
6140 *              model parameters in the DNG noise model.
6141 *
6142 * PARAMETERS : sens : the sensor sensitivity
6143 *
6144 ** RETURN    : O (sensor readout) noise
6145 *
6146 *==========================================================================*/
6147double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6148    double o = gCamCapability[mCameraId]->gradient_O * sens +
6149            gCamCapability[mCameraId]->offset_O;
6150    return ((o < 0.0) ? 0.0 : o);
6151}
6152
6153/*===========================================================================
6154 * FUNCTION   : getSensorSensitivity
6155 *
6156 * DESCRIPTION: convert iso_mode to an integer value
6157 *
6158 * PARAMETERS : iso_mode : the iso_mode supported by sensor
6159 *
6160 ** RETURN    : sensitivity supported by sensor
6161 *
6162 *==========================================================================*/
6163int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6164{
6165    int32_t sensitivity;
6166
6167    switch (iso_mode) {
6168    case CAM_ISO_MODE_100:
6169        sensitivity = 100;
6170        break;
6171    case CAM_ISO_MODE_200:
6172        sensitivity = 200;
6173        break;
6174    case CAM_ISO_MODE_400:
6175        sensitivity = 400;
6176        break;
6177    case CAM_ISO_MODE_800:
6178        sensitivity = 800;
6179        break;
6180    case CAM_ISO_MODE_1600:
6181        sensitivity = 1600;
6182        break;
6183    default:
6184        sensitivity = -1;
6185        break;
6186    }
6187    return sensitivity;
6188}
6189
6190/*===========================================================================
6191 * FUNCTION   : getCamInfo
6192 *
6193 * DESCRIPTION: query camera capabilities
6194 *
6195 * PARAMETERS :
6196 *   @cameraId  : camera Id
6197 *   @info      : camera info struct to be filled in with camera capabilities
6198 *
6199 * RETURN     : int type of status
6200 *              NO_ERROR  -- success
6201 *              none-zero failure code
6202 *==========================================================================*/
6203int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
6204        struct camera_info *info)
6205{
6206    ATRACE_CALL();
6207    int rc = 0;
6208
6209    pthread_mutex_lock(&gCamLock);
6210    if (NULL == gCamCapability[cameraId]) {
6211        rc = initCapabilities(cameraId);
6212        if (rc < 0) {
6213            pthread_mutex_unlock(&gCamLock);
6214            return rc;
6215        }
6216    }
6217
6218    if (NULL == gStaticMetadata[cameraId]) {
6219        rc = initStaticMetadata(cameraId);
6220        if (rc < 0) {
6221            pthread_mutex_unlock(&gCamLock);
6222            return rc;
6223        }
6224    }
6225
6226    switch(gCamCapability[cameraId]->position) {
6227    case CAM_POSITION_BACK:
6228        info->facing = CAMERA_FACING_BACK;
6229        break;
6230
6231    case CAM_POSITION_FRONT:
6232        info->facing = CAMERA_FACING_FRONT;
6233        break;
6234
6235    default:
6236        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
6237        rc = -1;
6238        break;
6239    }
6240
6241
6242    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
6243    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
6244    info->static_camera_characteristics = gStaticMetadata[cameraId];
6245
6246    pthread_mutex_unlock(&gCamLock);
6247
6248    return rc;
6249}
6250
6251/*===========================================================================
6252 * FUNCTION   : translateCapabilityToMetadata
6253 *
6254 * DESCRIPTION: translate the capability into camera_metadata_t
6255 *
6256 * PARAMETERS : type of the request
6257 *
6258 *
6259 * RETURN     : success: camera_metadata_t*
6260 *              failure: NULL
6261 *
6262 *==========================================================================*/
6263camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6264{
6265    if (mDefaultMetadata[type] != NULL) {
6266        return mDefaultMetadata[type];
6267    }
6268    //first time we are handling this request
6269    //fill up the metadata structure using the wrapper class
6270    CameraMetadata settings;
6271    //translate from cam_capability_t to camera_metadata_tag_t
6272    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6273    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6274    int32_t defaultRequestID = 0;
6275    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6276
6277    /* OIS disable */
6278    char ois_prop[PROPERTY_VALUE_MAX];
6279    memset(ois_prop, 0, sizeof(ois_prop));
6280    property_get("persist.camera.ois.disable", ois_prop, "0");
6281    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6282
6283    /* Force video to use OIS */
6284    char videoOisProp[PROPERTY_VALUE_MAX];
6285    memset(videoOisProp, 0, sizeof(videoOisProp));
6286    property_get("persist.camera.ois.video", videoOisProp, "1");
6287    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6288
6289    uint8_t controlIntent = 0;
6290    uint8_t focusMode;
6291    uint8_t vsMode;
6292    uint8_t optStabMode;
6293    uint8_t cacMode;
6294    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6295    switch (type) {
6296      case CAMERA3_TEMPLATE_PREVIEW:
6297        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
6298        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6299        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6300        break;
6301      case CAMERA3_TEMPLATE_STILL_CAPTURE:
6302        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
6303        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6304        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6305        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
6306        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
6307        break;
6308      case CAMERA3_TEMPLATE_VIDEO_RECORD:
6309        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
6310        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6311        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6312        if (forceVideoOis)
6313            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6314        break;
6315      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
6316        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
6317        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6318        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6319        if (forceVideoOis)
6320            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6321        break;
6322      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
6323        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
6324        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6325        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6326        break;
6327      case CAMERA3_TEMPLATE_MANUAL:
6328        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
6329        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6330        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6331        break;
6332      default:
6333        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
6334        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6335        break;
6336    }
6337    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
6338    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
6339    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
6340        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6341    }
6342    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
6343
6344    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6345            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
6346        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6347    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6348            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
6349            || ois_disable)
6350        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6351    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
6352
6353    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6354            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
6355
6356    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
6357    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
6358
6359    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
6360    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
6361
6362    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
6363    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
6364
6365    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
6366    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
6367
6368    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
6369    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
6370
6371    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
6372    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
6373
6374    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
6375    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
6376
6377    /*flash*/
6378    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
6379    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
6380
6381    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
6382    settings.update(ANDROID_FLASH_FIRING_POWER,
6383            &flashFiringLevel, 1);
6384
6385    /* lens */
6386    float default_aperture = gCamCapability[mCameraId]->apertures[0];
6387    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
6388
6389    if (gCamCapability[mCameraId]->filter_densities_count) {
6390        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
6391        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
6392                        gCamCapability[mCameraId]->filter_densities_count);
6393    }
6394
6395    float default_focal_length = gCamCapability[mCameraId]->focal_length;
6396    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
6397
6398    float default_focus_distance = 0;
6399    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
6400
6401    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
6402    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
6403
6404    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6405    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6406
6407    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
6408    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
6409
6410    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
6411    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
6412
6413    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
6414    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
6415
6416    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
6417    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
6418
6419    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6420    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6421
6422    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
6423    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
6424
6425    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
6426    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
6427
6428    /* Exposure time(Update the Min Exposure Time)*/
6429    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
6430    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
6431
6432    /* frame duration */
6433    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
6434    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
6435
6436    /* sensitivity */
6437    static const int32_t default_sensitivity = 100;
6438    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
6439
6440    /*edge mode*/
6441    static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
6442    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
6443
6444    /*noise reduction mode*/
6445    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6446    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
6447
6448    /*color correction mode*/
6449    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
6450    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
6451
6452    /*transform matrix mode*/
6453    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6454    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
6455
6456    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
6457    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
6458
6459    int32_t scaler_crop_region[4];
6460    scaler_crop_region[0] = 0;
6461    scaler_crop_region[1] = 0;
6462    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
6463    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
6464    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
6465
6466    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
6467    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
6468
6469    /*focus distance*/
6470    float focus_distance = 0.0;
6471    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
6472
6473    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
6474    float max_range = 0.0;
6475    float max_fixed_fps = 0.0;
6476    int32_t fps_range[2] = {0, 0};
6477    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
6478            i++) {
6479        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
6480            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6481        if (type == CAMERA3_TEMPLATE_PREVIEW ||
6482                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
6483                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
6484            if (range > max_range) {
6485                fps_range[0] =
6486                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6487                fps_range[1] =
6488                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6489                max_range = range;
6490            }
6491        } else {
6492            if (range < 0.01 && max_fixed_fps <
6493                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
6494                fps_range[0] =
6495                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6496                fps_range[1] =
6497                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6498                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6499            }
6500        }
6501    }
6502    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
6503
6504    /*precapture trigger*/
6505    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
6506    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
6507
6508    /*af trigger*/
6509    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
6510    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
6511
6512    /* ae & af regions */
6513    int32_t active_region[] = {
6514            gCamCapability[mCameraId]->active_array_size.left,
6515            gCamCapability[mCameraId]->active_array_size.top,
6516            gCamCapability[mCameraId]->active_array_size.left +
6517                    gCamCapability[mCameraId]->active_array_size.width,
6518            gCamCapability[mCameraId]->active_array_size.top +
6519                    gCamCapability[mCameraId]->active_array_size.height,
6520            0};
6521    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
6522            sizeof(active_region) / sizeof(active_region[0]));
6523    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
6524            sizeof(active_region) / sizeof(active_region[0]));
6525
6526    /* black level lock */
6527    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
6528    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
6529
6530    /* face detect mode */
6531    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
6532    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
6533
6534    /* lens shading map mode */
6535    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
6536    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
6537        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
6538    }
6539    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
6540
6541    //special defaults for manual template
6542    if (type == CAMERA3_TEMPLATE_MANUAL) {
6543        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
6544        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
6545
6546        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
6547        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
6548
6549        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
6550        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
6551
6552        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
6553        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
6554
6555        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
6556        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
6557
6558        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
6559        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
6560    }
6561
6562    /* CDS default */
6563    char prop[PROPERTY_VALUE_MAX];
6564    memset(prop, 0, sizeof(prop));
6565    property_get("persist.camera.CDS", prop, "Auto");
6566    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
6567    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
6568    if (CAM_CDS_MODE_MAX == cds_mode) {
6569        cds_mode = CAM_CDS_MODE_AUTO;
6570    }
6571    int32_t mode = cds_mode;
6572    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
6573
6574    mDefaultMetadata[type] = settings.release();
6575
6576    return mDefaultMetadata[type];
6577}
6578
6579/*===========================================================================
6580 * FUNCTION   : setFrameParameters
6581 *
6582 * DESCRIPTION: set parameters per frame as requested in the metadata from
6583 *              framework
6584 *
6585 * PARAMETERS :
6586 *   @request   : request that needs to be serviced
6587 *   @streamID : Stream ID of all the requested streams
6588 *   @blob_request: Whether this request is a blob request or not
6589 *
6590 * RETURN     : success: NO_ERROR
6591 *              failure:
6592 *==========================================================================*/
6593int QCamera3HardwareInterface::setFrameParameters(
6594                    camera3_capture_request_t *request,
6595                    cam_stream_ID_t streamID,
6596                    int blob_request,
6597                    uint32_t snapshotStreamId)
6598{
6599    /*translate from camera_metadata_t type to parm_type_t*/
6600    int rc = 0;
6601    int32_t hal_version = CAM_HAL_V3;
6602
6603    clear_metadata_buffer(mParameters);
6604    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
6605        ALOGE("%s: Failed to set hal version in the parameters", __func__);
6606        return BAD_VALUE;
6607    }
6608
6609    /*we need to update the frame number in the parameters*/
6610    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
6611            request->frame_number)) {
6612        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
6613        return BAD_VALUE;
6614    }
6615
6616    /* Update stream id of all the requested buffers */
6617    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
6618        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
6619        return BAD_VALUE;
6620    }
6621
6622    if (mUpdateDebugLevel) {
6623        uint32_t dummyDebugLevel = 0;
6624        /* The value of dummyDebugLevel is irrelavent. On
6625         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
6626        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
6627                dummyDebugLevel)) {
6628            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
6629            return BAD_VALUE;
6630        }
6631        mUpdateDebugLevel = false;
6632    }
6633
6634    if(request->settings != NULL){
6635        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
6636        if (blob_request)
6637            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
6638    }
6639
6640    return rc;
6641}
6642
6643/*===========================================================================
6644 * FUNCTION   : setReprocParameters
6645 *
6646 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
6647 *              return it.
6648 *
6649 * PARAMETERS :
6650 *   @request   : request that needs to be serviced
6651 *
6652 * RETURN     : success: NO_ERROR
6653 *              failure:
6654 *==========================================================================*/
6655int32_t QCamera3HardwareInterface::setReprocParameters(
6656        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
6657        uint32_t snapshotStreamId)
6658{
6659    /*translate from camera_metadata_t type to parm_type_t*/
6660    int rc = 0;
6661
6662    if (NULL == request->settings){
6663        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
6664        return BAD_VALUE;
6665    }
6666
6667    if (NULL == reprocParam) {
6668        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
6669        return BAD_VALUE;
6670    }
6671    clear_metadata_buffer(reprocParam);
6672
6673    /*we need to update the frame number in the parameters*/
6674    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
6675            request->frame_number)) {
6676        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
6677        return BAD_VALUE;
6678    }
6679
6680    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
6681    if (rc < 0) {
6682        ALOGE("%s: Failed to translate reproc request", __func__);
6683        return rc;
6684    }
6685
6686    CameraMetadata frame_settings;
6687    frame_settings = request->settings;
6688    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
6689            frame_settings.exists(QCAMERA3_CROP_REPROCESS) &&
6690            frame_settings.exists(QCAMERA3_CROP_STREAM_ID_REPROCESS)) {
6691        int32_t *crop_count =
6692                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
6693        int32_t *crop_data =
6694                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
6695        int32_t *crop_stream_ids =
6696                frame_settings.find(QCAMERA3_CROP_STREAM_ID_REPROCESS).data.i32;
6697        int32_t *roi_map =
6698                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
6699        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
6700            bool found = false;
6701            int32_t i;
6702            for (i = 0; i < *crop_count; i++) {
6703#ifdef __LP64__
6704                int32_t id = (int32_t)
6705                        ((((int64_t)request->input_buffer->stream) & 0x0000FFFF) ^
6706                                (((int64_t)request->input_buffer->stream) >> 0x20 & 0x0000FFFF));
6707#else
6708                int32_t id = (int32_t) request->input_buffer->stream;
6709#endif
6710                if (crop_stream_ids[i] == id) {
6711                    found = true;
6712                    break;
6713                }
6714            }
6715
6716            if (found) {
6717                cam_crop_data_t crop_meta;
6718                size_t roi_map_idx = i*4;
6719                size_t crop_info_idx = i*4;
6720                memset(&crop_meta, 0, sizeof(cam_crop_data_t));
6721                crop_meta.num_of_streams = 1;
6722                crop_meta.crop_info[0].crop.left   = crop_data[crop_info_idx++];
6723                crop_meta.crop_info[0].crop.top    = crop_data[crop_info_idx++];
6724                crop_meta.crop_info[0].crop.width  = crop_data[crop_info_idx++];
6725                crop_meta.crop_info[0].crop.height = crop_data[crop_info_idx++];
6726
6727                crop_meta.crop_info[0].roi_map.left =
6728                        roi_map[roi_map_idx++];
6729                crop_meta.crop_info[0].roi_map.top =
6730                        roi_map[roi_map_idx++];
6731                crop_meta.crop_info[0].roi_map.width =
6732                        roi_map[roi_map_idx++];
6733                crop_meta.crop_info[0].roi_map.height =
6734                        roi_map[roi_map_idx++];
6735
6736                if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
6737                    rc = BAD_VALUE;
6738                }
6739                CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
6740                        __func__,
6741                        request->input_buffer->stream,
6742                        crop_meta.crop_info[0].crop.left,
6743                        crop_meta.crop_info[0].crop.top,
6744                        crop_meta.crop_info[0].crop.width,
6745                        crop_meta.crop_info[0].crop.height);
6746                CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
6747                        __func__,
6748                        request->input_buffer->stream,
6749                        crop_meta.crop_info[0].roi_map.left,
6750                        crop_meta.crop_info[0].roi_map.top,
6751                        crop_meta.crop_info[0].roi_map.width,
6752                        crop_meta.crop_info[0].roi_map.height);
6753            } else {
6754                ALOGE("%s: No matching reprocess input stream found!", __func__);
6755            }
6756        } else {
6757            ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
6758        }
6759    }
6760
6761    return rc;
6762}
6763
6764/*===========================================================================
6765 * FUNCTION   : setHalFpsRange
6766 *
6767 * DESCRIPTION: set FPS range parameter
6768 *
6769 *
6770 * PARAMETERS :
6771 *   @settings    : Metadata from framework
6772 *   @hal_metadata: Metadata buffer
6773 *
6774 *
6775 * RETURN     : success: NO_ERROR
6776 *              failure:
6777 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // Seed both the preview (min/max) and video (video_min/video_max) ranges
    // from the framework's aeTargetFpsRange; the HFR handling below may
    // override them per the table in the comment.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record)           |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     30      |  [ 30,  30]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     30      |  [ 30,  30]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     */
    // Batching is only enabled below for constrained high-speed sessions.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Map the requested max fps onto a discrete HFR mode; if no HFR mode
        // matches, the fps range is passed through unchanged.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            // For high enough rates, batch multiple video frames per request
            // so preview can be serviced at PREVIEW_FPS_FOR_HFR, capping the
            // batch at MAX_HFR_BATCH_SIZE.
            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                mHFRVideoFps = fps_range.max_fps;
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

            if (!m_bIsVideo) {
                // No video stream configured: clamp everything to the preview
                // rate (see the "Video stream is absent" rows above).
                if (fps_range.min_fps > PREVIEW_FPS_FOR_HFR) {
                    fps_range.min_fps = PREVIEW_FPS_FOR_HFR;
                }
                fps_range.max_fps = fps_range.min_fps;
                fps_range.video_max_fps = fps_range.min_fps;
            } else {
                // Video stream configured: the sensor runs at the HFR rate.
                fps_range.min_fps = fps_range.video_max_fps;
            }
            // Video fps is always a fixed range (min == max) in HFR mode.
            fps_range.video_min_fps = fps_range.video_max_fps;
         }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
6858
6859/*===========================================================================
6860 * FUNCTION   : translateToHalMetadata
6861 *
6862 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
6863 *
6864 *
 * PARAMETERS :
 *   @request          : request sent from framework
 *   @hal_metadata     : metadata buffer to be populated with the translated
 *                       parameters
 *   @snapshotStreamId : stream ID of the snapshot stream
 *
6869 * RETURN     : success: NO_ERROR
6870 *              failure:
6871 *==========================================================================*/
6872int QCamera3HardwareInterface::translateToHalMetadata
6873                                  (const camera3_capture_request_t *request,
6874                                   metadata_buffer_t *hal_metadata,
6875                                   uint32_t snapshotStreamId)
6876{
6877    int rc = 0;
6878    CameraMetadata frame_settings;
6879    frame_settings = request->settings;
6880
6881    /* Do not change the order of the following list unless you know what you are
6882     * doing.
6883     * The order is laid out in such a way that parameters in the front of the table
6884     * may be used to override the parameters later in the table. Examples are:
6885     * 1. META_MODE should precede AEC/AWB/AF MODE
6886     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
6887     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
6888     * 4. Any mode should precede it's corresponding settings
6889     */
6890    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
6891        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
6892        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
6893            rc = BAD_VALUE;
6894        }
6895        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
6896        if (rc != NO_ERROR) {
6897            ALOGE("%s: extractSceneMode failed", __func__);
6898        }
6899    }
6900
6901    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
6902        uint8_t fwk_aeMode =
6903            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
6904        uint8_t aeMode;
6905        int32_t redeye;
6906
6907        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
6908            aeMode = CAM_AE_MODE_OFF;
6909        } else {
6910            aeMode = CAM_AE_MODE_ON;
6911        }
6912        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
6913            redeye = 1;
6914        } else {
6915            redeye = 0;
6916        }
6917
6918        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
6919                fwk_aeMode);
6920        if (NAME_NOT_FOUND != val) {
6921            int32_t flashMode = (int32_t)val;
6922            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_LED_MODE, flashMode);
6923            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
6924        }
6925
6926        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
6927        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
6928            rc = BAD_VALUE;
6929        }
6930    }
6931
6932    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
6933        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
6934        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6935                fwk_whiteLevel);
6936        if (NAME_NOT_FOUND != val) {
6937            uint8_t whiteLevel = (uint8_t)val;
6938            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
6939                rc = BAD_VALUE;
6940            }
6941        }
6942    }
6943
6944    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
6945        uint8_t fwk_cacMode =
6946                frame_settings.find(
6947                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
6948        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6949                fwk_cacMode);
6950        if (NAME_NOT_FOUND != val) {
6951            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
6952            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
6953                rc = BAD_VALUE;
6954            }
6955        } else {
6956            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
6957        }
6958    }
6959
6960    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
6961        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
6962        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6963                fwk_focusMode);
6964        if (NAME_NOT_FOUND != val) {
6965            uint8_t focusMode = (uint8_t)val;
6966            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
6967                rc = BAD_VALUE;
6968            }
6969        }
6970    }
6971
6972    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
6973        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
6974        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_FOCUS_DISTANCE,
6975                focalDistance)) {
6976            rc = BAD_VALUE;
6977        }
6978    }
6979
6980    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
6981        uint8_t fwk_antibandingMode =
6982                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
6983        int val = lookupHalName(ANTIBANDING_MODES_MAP,
6984                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
6985        if (NAME_NOT_FOUND != val) {
6986            uint32_t hal_antibandingMode = (uint32_t)val;
6987            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_ANTIBANDING,
6988                    hal_antibandingMode)) {
6989                rc = BAD_VALUE;
6990            }
6991        }
6992    }
6993
6994    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
6995        int32_t expCompensation = frame_settings.find(
6996                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
6997        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
6998            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
6999        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7000            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7001        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7002                expCompensation)) {
7003            rc = BAD_VALUE;
7004        }
7005    }
7006
7007    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7008        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7009        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7010            rc = BAD_VALUE;
7011        }
7012    }
7013    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7014        rc = setHalFpsRange(frame_settings, hal_metadata);
7015        if (rc != NO_ERROR) {
7016            ALOGE("%s: setHalFpsRange failed", __func__);
7017        }
7018    }
7019
7020    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7021        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7022        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7023            rc = BAD_VALUE;
7024        }
7025    }
7026
7027    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7028        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7029        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7030                fwk_effectMode);
7031        if (NAME_NOT_FOUND != val) {
7032            uint8_t effectMode = (uint8_t)val;
7033            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_EFFECT, effectMode)) {
7034                rc = BAD_VALUE;
7035            }
7036        }
7037    }
7038
7039    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7040        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7041        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
7042                colorCorrectMode)) {
7043            rc = BAD_VALUE;
7044        }
7045    }
7046
7047    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7048        cam_color_correct_gains_t colorCorrectGains;
7049        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7050            colorCorrectGains.gains[i] =
7051                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7052        }
7053        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
7054                colorCorrectGains)) {
7055            rc = BAD_VALUE;
7056        }
7057    }
7058
7059    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7060        cam_color_correct_matrix_t colorCorrectTransform;
7061        cam_rational_type_t transform_elem;
7062        size_t num = 0;
7063        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7064           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7065              transform_elem.numerator =
7066                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7067              transform_elem.denominator =
7068                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7069              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7070              num++;
7071           }
7072        }
7073        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7074                colorCorrectTransform)) {
7075            rc = BAD_VALUE;
7076        }
7077    }
7078
7079    cam_trigger_t aecTrigger;
7080    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7081    aecTrigger.trigger_id = -1;
7082    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7083        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7084        aecTrigger.trigger =
7085            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7086        aecTrigger.trigger_id =
7087            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7088        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7089                aecTrigger)) {
7090            rc = BAD_VALUE;
7091        }
7092        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7093                aecTrigger.trigger, aecTrigger.trigger_id);
7094    }
7095
7096    /*af_trigger must come with a trigger id*/
7097    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7098        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7099        cam_trigger_t af_trigger;
7100        af_trigger.trigger =
7101            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7102        af_trigger.trigger_id =
7103            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7104        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7105            rc = BAD_VALUE;
7106        }
7107        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7108                af_trigger.trigger, af_trigger.trigger_id);
7109    }
7110
7111    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7112        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7113        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DEMOSAIC, demosaic)) {
7114            rc = BAD_VALUE;
7115        }
7116    }
7117
7118    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7119        cam_edge_application_t edge_application;
7120        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7121        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7122            edge_application.sharpness = 0;
7123        } else {
7124            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
7125                uint8_t edgeStrength = frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
7126                edge_application.sharpness = (int32_t)edgeStrength;
7127            } else {
7128                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7129            }
7130        }
7131        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7132            rc = BAD_VALUE;
7133        }
7134    }
7135
7136    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7137        int32_t respectFlashMode = 1;
7138        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7139            uint8_t fwk_aeMode =
7140                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7141            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7142                respectFlashMode = 0;
7143                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7144                    __func__);
7145            }
7146        }
7147        if (respectFlashMode) {
7148            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7149                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7150            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7151            // To check: CAM_INTF_META_FLASH_MODE usage
7152            if (NAME_NOT_FOUND != val) {
7153                uint8_t flashMode = (uint8_t)val;
7154                if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_LED_MODE, flashMode)) {
7155                    rc = BAD_VALUE;
7156                }
7157            }
7158        }
7159    }
7160
7161    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7162        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7163        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FLASH_POWER, flashPower)) {
7164            rc = BAD_VALUE;
7165        }
7166    }
7167
7168    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7169        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
7170        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
7171                flashFiringTime)) {
7172            rc = BAD_VALUE;
7173        }
7174    }
7175
7176    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
7177        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
7178        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
7179                hotPixelMode)) {
7180            rc = BAD_VALUE;
7181        }
7182    }
7183
7184    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
7185        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
7186        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_APERTURE,
7187                lensAperture)) {
7188            rc = BAD_VALUE;
7189        }
7190    }
7191
7192    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
7193        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
7194        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
7195                filterDensity)) {
7196            rc = BAD_VALUE;
7197        }
7198    }
7199
7200    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
7201        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
7202        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_FOCAL_LENGTH, focalLength)) {
7203            rc = BAD_VALUE;
7204        }
7205    }
7206
7207    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
7208        uint8_t optStabMode =
7209                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
7210        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_OPT_STAB_MODE, optStabMode)) {
7211            rc = BAD_VALUE;
7212        }
7213    }
7214
7215    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
7216        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
7217        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_NOISE_REDUCTION_MODE,
7218                noiseRedMode)) {
7219            rc = BAD_VALUE;
7220        }
7221    }
7222
7223    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
7224        uint8_t noiseRedStrength =
7225                frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
7226        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
7227                noiseRedStrength)) {
7228            rc = BAD_VALUE;
7229        }
7230    }
7231
7232    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
7233        float reprocessEffectiveExposureFactor =
7234            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
7235        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
7236                reprocessEffectiveExposureFactor)) {
7237            rc = BAD_VALUE;
7238        }
7239    }
7240
7241    cam_crop_region_t scalerCropRegion;
7242    bool scalerCropSet = false;
7243    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
7244        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
7245        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
7246        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
7247        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
7248
7249        // Map coordinate system from active array to sensor output.
7250        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
7251                scalerCropRegion.width, scalerCropRegion.height);
7252
7253        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
7254                scalerCropRegion)) {
7255            rc = BAD_VALUE;
7256        }
7257        scalerCropSet = true;
7258    }
7259
7260    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
7261        int64_t sensorExpTime =
7262                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
7263        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
7264        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
7265                sensorExpTime)) {
7266            rc = BAD_VALUE;
7267        }
7268    }
7269
7270    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
7271        int64_t sensorFrameDuration =
7272                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
7273        int64_t minFrameDuration = getMinFrameDuration(request);
7274        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
7275        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
7276            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
7277        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
7278        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
7279                sensorFrameDuration)) {
7280            rc = BAD_VALUE;
7281        }
7282    }
7283
7284    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
7285        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
7286        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
7287                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
7288        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
7289                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
7290        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
7291        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
7292                sensorSensitivity)) {
7293            rc = BAD_VALUE;
7294        }
7295    }
7296
7297    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
7298        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
7299        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SHADING_MODE, shadingMode)) {
7300            rc = BAD_VALUE;
7301        }
7302    }
7303
7304    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
7305        uint8_t shadingStrength = frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
7306        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SHADING_STRENGTH,
7307                shadingStrength)) {
7308            rc = BAD_VALUE;
7309        }
7310    }
7311
7312
7313    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
7314        uint8_t fwk_facedetectMode =
7315                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
7316        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7317                fwk_facedetectMode);
7318        if (NAME_NOT_FOUND != val) {
7319            uint8_t facedetectMode = (uint8_t)val;
7320            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
7321                    facedetectMode)) {
7322                rc = BAD_VALUE;
7323            }
7324        }
7325    }
7326
7327    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
7328        uint8_t histogramMode =
7329                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
7330        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_HISTOGRAM_MODE,
7331                histogramMode)) {
7332            rc = BAD_VALUE;
7333        }
7334    }
7335
7336    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
7337        uint8_t sharpnessMapMode =
7338                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
7339        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
7340                sharpnessMapMode)) {
7341            rc = BAD_VALUE;
7342        }
7343    }
7344
7345    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
7346        uint8_t tonemapMode =
7347                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
7348        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
7349            rc = BAD_VALUE;
7350        }
7351    }
7352    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
7353    /*All tonemap channels will have the same number of points*/
7354    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
7355        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
7356        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
7357        cam_rgb_tonemap_curves tonemapCurves;
7358        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
7359        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7360            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
7361                    __func__, tonemapCurves.tonemap_points_cnt,
7362                    CAM_MAX_TONEMAP_CURVE_SIZE);
7363            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7364        }
7365
7366        /* ch0 = G*/
7367        size_t point = 0;
7368        cam_tonemap_curve_t tonemapCurveGreen;
7369        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7370            for (size_t j = 0; j < 2; j++) {
7371               tonemapCurveGreen.tonemap_points[i][j] =
7372                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
7373               point++;
7374            }
7375        }
7376        tonemapCurves.curves[0] = tonemapCurveGreen;
7377
7378        /* ch 1 = B */
7379        point = 0;
7380        cam_tonemap_curve_t tonemapCurveBlue;
7381        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7382            for (size_t j = 0; j < 2; j++) {
7383               tonemapCurveBlue.tonemap_points[i][j] =
7384                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
7385               point++;
7386            }
7387        }
7388        tonemapCurves.curves[1] = tonemapCurveBlue;
7389
7390        /* ch 2 = R */
7391        point = 0;
7392        cam_tonemap_curve_t tonemapCurveRed;
7393        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7394            for (size_t j = 0; j < 2; j++) {
7395               tonemapCurveRed.tonemap_points[i][j] =
7396                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
7397               point++;
7398            }
7399        }
7400        tonemapCurves.curves[2] = tonemapCurveRed;
7401
7402        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
7403                tonemapCurves)) {
7404            rc = BAD_VALUE;
7405        }
7406    }
7407
7408    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
7409        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
7410        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT,
7411                captureIntent)) {
7412            rc = BAD_VALUE;
7413        }
7414    }
7415
7416    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
7417        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
7418        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
7419                blackLevelLock)) {
7420            rc = BAD_VALUE;
7421        }
7422    }
7423
7424    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
7425        uint8_t lensShadingMapMode =
7426                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
7427        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
7428                lensShadingMapMode)) {
7429            rc = BAD_VALUE;
7430        }
7431    }
7432
7433    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
7434        cam_area_t roi;
7435        bool reset = true;
7436        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
7437        if (scalerCropSet) {
7438            reset = resetIfNeededROI(&roi, &scalerCropRegion);
7439        }
7440        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
7441            rc = BAD_VALUE;
7442        }
7443    }
7444
7445    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
7446        cam_area_t roi;
7447        bool reset = true;
7448        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
7449        if (scalerCropSet) {
7450            reset = resetIfNeededROI(&roi, &scalerCropRegion);
7451        }
7452        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
7453            rc = BAD_VALUE;
7454        }
7455    }
7456
7457    // CDS
7458    if (frame_settings.exists(QCAMERA3_CDS_MODE)) {
7459        int32_t *cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
7460        if ((CAM_CDS_MODE_MAX <= (*cds)) || (0 > (*cds))) {
7461            ALOGE("%s: Invalid CDS mode %d!", __func__, *cds);
7462        } else {
7463            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CDS_MODE, *cds)) {
7464                rc = BAD_VALUE;
7465            }
7466        }
7467    }
7468
7469    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
7470        int32_t fwk_testPatternMode =
7471                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
7472        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
7473                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
7474
7475        if (NAME_NOT_FOUND != testPatternMode) {
7476            cam_test_pattern_data_t testPatternData;
7477            memset(&testPatternData, 0, sizeof(testPatternData));
7478            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
7479            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
7480                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
7481                int32_t *fwk_testPatternData =
7482                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
7483                testPatternData.r = fwk_testPatternData[0];
7484                testPatternData.b = fwk_testPatternData[3];
7485                switch (gCamCapability[mCameraId]->color_arrangement) {
7486                    case CAM_FILTER_ARRANGEMENT_RGGB:
7487                    case CAM_FILTER_ARRANGEMENT_GRBG:
7488                        testPatternData.gr = fwk_testPatternData[1];
7489                        testPatternData.gb = fwk_testPatternData[2];
7490                        break;
7491                    case CAM_FILTER_ARRANGEMENT_GBRG:
7492                    case CAM_FILTER_ARRANGEMENT_BGGR:
7493                        testPatternData.gr = fwk_testPatternData[2];
7494                        testPatternData.gb = fwk_testPatternData[1];
7495                        break;
7496                    default:
7497                        ALOGE("%s: color arrangement %d is not supported", __func__,
7498                                gCamCapability[mCameraId]->color_arrangement);
7499                        break;
7500                }
7501            }
7502            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
7503                    testPatternData)) {
7504                rc = BAD_VALUE;
7505            }
7506        } else {
7507            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
7508                    fwk_testPatternMode);
7509        }
7510    }
7511
7512    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
7513        size_t count = 0;
7514        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
7515        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
7516                gps_coords.data.d, gps_coords.count, count);
7517        if (gps_coords.count != count) {
7518            rc = BAD_VALUE;
7519        }
7520    }
7521
7522    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
7523        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
7524        size_t count = 0;
7525        const char *gps_methods_src = (const char *)
7526                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
7527        memset(gps_methods, '\0', sizeof(gps_methods));
7528        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
7529        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
7530                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
7531        if (GPS_PROCESSING_METHOD_SIZE != count) {
7532            rc = BAD_VALUE;
7533        }
7534    }
7535
7536    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
7537        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
7538        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
7539                gps_timestamp)) {
7540            rc = BAD_VALUE;
7541        }
7542    }
7543
7544    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7545        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
7546        cam_rotation_info_t rotation_info;
7547        if (orientation == 0) {
7548           rotation_info.rotation = ROTATE_0;
7549        } else if (orientation == 90) {
7550           rotation_info.rotation = ROTATE_90;
7551        } else if (orientation == 180) {
7552           rotation_info.rotation = ROTATE_180;
7553        } else if (orientation == 270) {
7554           rotation_info.rotation = ROTATE_270;
7555        }
7556        rotation_info.streamId = snapshotStreamId;
7557        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
7558        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
7559            rc = BAD_VALUE;
7560        }
7561    }
7562
7563    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
7564        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
7565        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
7566            rc = BAD_VALUE;
7567        }
7568    }
7569
7570    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
7571        uint32_t thumb_quality = (uint32_t)
7572                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
7573        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
7574                thumb_quality)) {
7575            rc = BAD_VALUE;
7576        }
7577    }
7578
7579    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7580        cam_dimension_t dim;
7581        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7582        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7583        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
7584            rc = BAD_VALUE;
7585        }
7586    }
7587
7588    // Internal metadata
7589    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
7590        size_t count = 0;
7591        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
7592        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
7593                privatedata.data.i32, privatedata.count, count);
7594        if (privatedata.count != count) {
7595            rc = BAD_VALUE;
7596        }
7597    }
7598
7599    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
7600        uint8_t* use_av_timer =
7601                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
7602        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
7603            rc = BAD_VALUE;
7604        }
7605    }
7606
7607    // EV step
7608    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
7609            gCamCapability[mCameraId]->exp_compensation_step)) {
7610        rc = BAD_VALUE;
7611    }
7612
7613    return rc;
7614}
7615
7616/*===========================================================================
7617 * FUNCTION   : captureResultCb
7618 *
7619 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
7620 *
7621 * PARAMETERS :
7622 *   @frame  : frame information from mm-camera-interface
7623 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
7624 *   @userdata: userdata
7625 *
7626 * RETURN     : NONE
7627 *==========================================================================*/
7628void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
7629                camera3_stream_buffer_t *buffer,
7630                uint32_t frame_number, void *userdata)
7631{
7632    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
7633    if (hw == NULL) {
7634        ALOGE("%s: Invalid hw %p", __func__, hw);
7635        return;
7636    }
7637
7638    hw->captureResultCb(metadata, buffer, frame_number);
7639    return;
7640}
7641
7642
7643/*===========================================================================
7644 * FUNCTION   : initialize
7645 *
7646 * DESCRIPTION: Pass framework callback pointers to HAL
7647 *
 * PARAMETERS :
 *   @device       : camera3 device handle whose priv field holds the HAL instance
 *   @callback_ops : framework callback function table to be stored by the HAL
 *
7651 * RETURN     : Success : 0
7652 *              Failure: -ENODEV
7653 *==========================================================================*/
7654
7655int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
7656                                  const camera3_callback_ops_t *callback_ops)
7657{
7658    CDBG("%s: E", __func__);
7659    QCamera3HardwareInterface *hw =
7660        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7661    if (!hw) {
7662        ALOGE("%s: NULL camera device", __func__);
7663        return -ENODEV;
7664    }
7665
7666    int rc = hw->initialize(callback_ops);
7667    CDBG("%s: X", __func__);
7668    return rc;
7669}
7670
7671/*===========================================================================
7672 * FUNCTION   : configure_streams
7673 *
 * DESCRIPTION: Static glue that recovers the HAL instance from the device
 *              handle and forwards the framework's stream configuration to it
 *
 * PARAMETERS :
 *   @device      : camera3 device handle whose priv field holds the HAL instance
 *   @stream_list : stream configuration requested by the framework
 *
7679 * RETURN     : Success: 0
7680 *              Failure: -EINVAL (if stream configuration is invalid)
7681 *                       -ENODEV (fatal error)
7682 *==========================================================================*/
7683
7684int QCamera3HardwareInterface::configure_streams(
7685        const struct camera3_device *device,
7686        camera3_stream_configuration_t *stream_list)
7687{
7688    CDBG("%s: E", __func__);
7689    QCamera3HardwareInterface *hw =
7690        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7691    if (!hw) {
7692        ALOGE("%s: NULL camera device", __func__);
7693        return -ENODEV;
7694    }
7695    int rc = hw->configureStreams(stream_list);
7696    CDBG("%s: X", __func__);
7697    return rc;
7698}
7699
7700/*===========================================================================
7701 * FUNCTION   : construct_default_request_settings
7702 *
7703 * DESCRIPTION: Configure a settings buffer to meet the required use case
7704 *
7705 * PARAMETERS :
7706 *
7707 *
7708 * RETURN     : Success: Return valid metadata
7709 *              Failure: Return NULL
7710 *==========================================================================*/
7711const camera_metadata_t* QCamera3HardwareInterface::
7712    construct_default_request_settings(const struct camera3_device *device,
7713                                        int type)
7714{
7715
7716    CDBG("%s: E", __func__);
7717    camera_metadata_t* fwk_metadata = NULL;
7718    QCamera3HardwareInterface *hw =
7719        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7720    if (!hw) {
7721        ALOGE("%s: NULL camera device", __func__);
7722        return NULL;
7723    }
7724
7725    fwk_metadata = hw->translateCapabilityToMetadata(type);
7726
7727    CDBG("%s: X", __func__);
7728    return fwk_metadata;
7729}
7730
7731/*===========================================================================
7732 * FUNCTION   : process_capture_request
7733 *
7734 * DESCRIPTION:
7735 *
7736 * PARAMETERS :
7737 *
7738 *
7739 * RETURN     :
7740 *==========================================================================*/
7741int QCamera3HardwareInterface::process_capture_request(
7742                    const struct camera3_device *device,
7743                    camera3_capture_request_t *request)
7744{
7745    CDBG("%s: E", __func__);
7746    QCamera3HardwareInterface *hw =
7747        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7748    if (!hw) {
7749        ALOGE("%s: NULL camera device", __func__);
7750        return -EINVAL;
7751    }
7752
7753    int rc = hw->processCaptureRequest(request);
7754    CDBG("%s: X", __func__);
7755    return rc;
7756}
7757
7758/*===========================================================================
7759 * FUNCTION   : dump
7760 *
7761 * DESCRIPTION:
7762 *
7763 * PARAMETERS :
7764 *
7765 *
7766 * RETURN     :
7767 *==========================================================================*/
7768
7769void QCamera3HardwareInterface::dump(
7770                const struct camera3_device *device, int fd)
7771{
7772    /* Log level property is read when "adb shell dumpsys media.camera" is
7773       called so that the log level can be controlled without restarting
7774       the media server */
7775    getLogLevel();
7776
7777    CDBG("%s: E", __func__);
7778    QCamera3HardwareInterface *hw =
7779        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7780    if (!hw) {
7781        ALOGE("%s: NULL camera device", __func__);
7782        return;
7783    }
7784
7785    hw->dump(fd);
7786    CDBG("%s: X", __func__);
7787    return;
7788}
7789
7790/*===========================================================================
7791 * FUNCTION   : flush
7792 *
7793 * DESCRIPTION:
7794 *
7795 * PARAMETERS :
7796 *
7797 *
7798 * RETURN     :
7799 *==========================================================================*/
7800
7801int QCamera3HardwareInterface::flush(
7802                const struct camera3_device *device)
7803{
7804    int rc;
7805    CDBG("%s: E", __func__);
7806    QCamera3HardwareInterface *hw =
7807        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
7808    if (!hw) {
7809        ALOGE("%s: NULL camera device", __func__);
7810        return -EINVAL;
7811    }
7812
7813    rc = hw->flush();
7814    CDBG("%s: X", __func__);
7815    return rc;
7816}
7817
7818/*===========================================================================
7819 * FUNCTION   : close_camera_device
7820 *
7821 * DESCRIPTION:
7822 *
7823 * PARAMETERS :
7824 *
7825 *
7826 * RETURN     :
7827 *==========================================================================*/
7828int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
7829{
7830    CDBG("%s: E", __func__);
7831    int ret = NO_ERROR;
7832    QCamera3HardwareInterface *hw =
7833        reinterpret_cast<QCamera3HardwareInterface *>(
7834            reinterpret_cast<camera3_device_t *>(device)->priv);
7835    if (!hw) {
7836        ALOGE("NULL camera device");
7837        return BAD_VALUE;
7838    }
7839    delete hw;
7840
7841    CDBG("%s: X", __func__);
7842    return ret;
7843}
7844
7845/*===========================================================================
7846 * FUNCTION   : getWaveletDenoiseProcessPlate
7847 *
7848 * DESCRIPTION: query wavelet denoise process plate
7849 *
7850 * PARAMETERS : None
7851 *
7852 * RETURN     : WNR prcocess plate vlaue
7853 *==========================================================================*/
7854cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
7855{
7856    char prop[PROPERTY_VALUE_MAX];
7857    memset(prop, 0, sizeof(prop));
7858    property_get("persist.denoise.process.plates", prop, "0");
7859    int processPlate = atoi(prop);
7860    switch(processPlate) {
7861    case 0:
7862        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
7863    case 1:
7864        return CAM_WAVELET_DENOISE_CBCR_ONLY;
7865    case 2:
7866        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
7867    case 3:
7868        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
7869    default:
7870        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
7871    }
7872}
7873
7874
7875/*===========================================================================
7876 * FUNCTION   : extractSceneMode
7877 *
7878 * DESCRIPTION: Extract scene mode from frameworks set metadata
7879 *
7880 * PARAMETERS :
7881 *      @frame_settings: CameraMetadata reference
7882 *      @metaMode: ANDROID_CONTORL_MODE
7883 *      @hal_metadata: hal metadata structure
7884 *
7885 * RETURN     : None
7886 *==========================================================================*/
7887int32_t QCamera3HardwareInterface::extractSceneMode(
7888        const CameraMetadata &frame_settings, uint8_t metaMode,
7889        metadata_buffer_t *hal_metadata)
7890{
7891    int32_t rc = NO_ERROR;
7892
7893    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
7894        camera_metadata_ro_entry entry =
7895                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
7896        if (0 == entry.count)
7897            return rc;
7898
7899        uint8_t fwk_sceneMode = entry.data.u8[0];
7900
7901        int val = lookupHalName(SCENE_MODES_MAP,
7902                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
7903                fwk_sceneMode);
7904        if (NAME_NOT_FOUND != val) {
7905            uint8_t sceneMode = (uint8_t)val;
7906            CDBG("%s: sceneMode: %d", __func__, sceneMode);
7907            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
7908                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
7909                rc = BAD_VALUE;
7910            }
7911        }
7912    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
7913            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
7914        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
7915        CDBG("%s: sceneMode: %d", __func__, sceneMode);
7916        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
7917                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
7918            rc = BAD_VALUE;
7919        }
7920    }
7921    return rc;
7922}
7923
7924/*===========================================================================
7925 * FUNCTION   : needRotationReprocess
7926 *
7927 * DESCRIPTION: if rotation needs to be done by reprocess in pp
7928 *
7929 * PARAMETERS : none
7930 *
7931 * RETURN     : true: needed
7932 *              false: no need
7933 *==========================================================================*/
7934bool QCamera3HardwareInterface::needRotationReprocess()
7935{
7936    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
7937        // current rotation is not zero, and pp has the capability to process rotation
7938        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
7939        return true;
7940    }
7941
7942    return false;
7943}
7944
7945/*===========================================================================
7946 * FUNCTION   : needReprocess
7947 *
7948 * DESCRIPTION: if reprocess in needed
7949 *
7950 * PARAMETERS : none
7951 *
7952 * RETURN     : true: needed
7953 *              false: no need
7954 *==========================================================================*/
7955bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
7956{
7957    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
7958        // TODO: add for ZSL HDR later
7959        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
7960        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
7961            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
7962            return true;
7963        } else {
7964            CDBG_HIGH("%s: already post processed frame", __func__);
7965            return false;
7966        }
7967    }
7968    return needRotationReprocess();
7969}
7970
7971/*===========================================================================
7972 * FUNCTION   : needJpegRotation
7973 *
7974 * DESCRIPTION: if rotation from jpeg is needed
7975 *
7976 * PARAMETERS : none
7977 *
7978 * RETURN     : true: needed
7979 *              false: no need
7980 *==========================================================================*/
7981bool QCamera3HardwareInterface::needJpegRotation()
7982{
7983   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
7984    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
7985       CDBG("%s: Need Jpeg to do the rotation", __func__);
7986       return true;
7987    }
7988    return false;
7989}
7990
7991/*===========================================================================
7992 * FUNCTION   : addOfflineReprocChannel
7993 *
7994 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
7995 *              coming from input channel
7996 *
7997 * PARAMETERS :
7998 *   @config  : reprocess configuration
7999 *
8000 *
8001 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8002 *==========================================================================*/
8003QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
8004        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
8005{
8006    int32_t rc = NO_ERROR;
8007    QCamera3ReprocessChannel *pChannel = NULL;
8008
8009    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
8010            mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
8011    if (NULL == pChannel) {
8012        ALOGE("%s: no mem for reprocess channel", __func__);
8013        return NULL;
8014    }
8015
8016    rc = pChannel->initialize(IS_TYPE_NONE);
8017    if (rc != NO_ERROR) {
8018        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
8019        delete pChannel;
8020        return NULL;
8021    }
8022
8023    // pp feature config
8024    cam_pp_feature_config_t pp_config;
8025    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
8026
8027    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
8028
8029    rc = pChannel->addReprocStreamsFromSource(pp_config,
8030            config,
8031            IS_TYPE_NONE,
8032            mMetadataChannel);
8033
8034    if (rc != NO_ERROR) {
8035        delete pChannel;
8036        return NULL;
8037    }
8038    return pChannel;
8039}
8040
8041/*===========================================================================
8042 * FUNCTION   : getMobicatMask
8043 *
8044 * DESCRIPTION: returns mobicat mask
8045 *
8046 * PARAMETERS : none
8047 *
8048 * RETURN     : mobicat mask
8049 *
8050 *==========================================================================*/
8051uint8_t QCamera3HardwareInterface::getMobicatMask()
8052{
8053    return m_MobicatMask;
8054}
8055
8056/*===========================================================================
8057 * FUNCTION   : setMobicat
8058 *
8059 * DESCRIPTION: set Mobicat on/off.
8060 *
8061 * PARAMETERS :
8062 *   @params  : none
8063 *
8064 * RETURN     : int32_t type of status
8065 *              NO_ERROR  -- success
8066 *              none-zero failure code
8067 *==========================================================================*/
8068int32_t QCamera3HardwareInterface::setMobicat()
8069{
8070    char value [PROPERTY_VALUE_MAX];
8071    property_get("persist.camera.mobicat", value, "0");
8072    int32_t ret = NO_ERROR;
8073    uint8_t enableMobi = (uint8_t)atoi(value);
8074
8075    if (enableMobi) {
8076        tune_cmd_t tune_cmd;
8077        tune_cmd.type = SET_RELOAD_CHROMATIX;
8078        tune_cmd.module = MODULE_ALL;
8079        tune_cmd.value = TRUE;
8080        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8081                CAM_INTF_PARM_SET_VFE_COMMAND,
8082                tune_cmd);
8083
8084        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8085                CAM_INTF_PARM_SET_PP_COMMAND,
8086                tune_cmd);
8087    }
8088    m_MobicatMask = enableMobi;
8089
8090    return ret;
8091}
8092
8093/*===========================================================================
8094* FUNCTION   : getLogLevel
8095*
8096* DESCRIPTION: Reads the log level property into a variable
8097*
8098* PARAMETERS :
8099*   None
8100*
8101* RETURN     :
8102*   None
8103*==========================================================================*/
8104void QCamera3HardwareInterface::getLogLevel()
8105{
8106    char prop[PROPERTY_VALUE_MAX];
8107    uint32_t globalLogLevel = 0;
8108
8109    property_get("persist.camera.hal.debug", prop, "0");
8110    int val = atoi(prop);
8111    if (0 <= val) {
8112        gCamHal3LogLevel = (uint32_t)val;
8113    }
8114    property_get("persist.camera.global.debug", prop, "0");
8115    val = atoi(prop);
8116    if (0 <= val) {
8117        globalLogLevel = (uint32_t)val;
8118    }
8119
8120    /* Highest log level among hal.logs and global.logs is selected */
8121    if (gCamHal3LogLevel < globalLogLevel)
8122        gCamHal3LogLevel = globalLogLevel;
8123
8124    return;
8125}
8126
8127/*===========================================================================
8128 * FUNCTION   : validateStreamRotations
8129 *
8130 * DESCRIPTION: Check if the rotations requested are supported
8131 *
8132 * PARAMETERS :
8133 *   @stream_list : streams to be configured
8134 *
8135 * RETURN     : NO_ERROR on success
8136 *              -EINVAL on failure
8137 *
8138 *==========================================================================*/
8139int QCamera3HardwareInterface::validateStreamRotations(
8140        camera3_stream_configuration_t *streamList)
8141{
8142    int rc = NO_ERROR;
8143
8144    /*
8145    * Loop through all streams requested in configuration
8146    * Check if unsupported rotations have been requested on any of them
8147    */
8148    for (size_t j = 0; j < streamList->num_streams; j++){
8149        camera3_stream_t *newStream = streamList->streams[j];
8150
8151        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
8152        bool isImplDef = (newStream->format ==
8153                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
8154        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
8155                isImplDef);
8156
8157        if (isRotated && (!isImplDef || isZsl)) {
8158            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
8159                    "type:%d and stream format:%d", __func__,
8160                    newStream->rotation, newStream->stream_type,
8161                    newStream->format);
8162            rc = -EINVAL;
8163            break;
8164        }
8165    }
8166    return rc;
8167}
8168
8169}; //end namespace qcamera
8170