QCamera3HWI.cpp revision 0bfece10f02ca969736a88f2fbf141373a715efa
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
// Convenience accessor for the data pointer of a memory object at INDEX.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Frames of pipeline delay reported before the first real result arrives.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum representable values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream count limits enforced by this HAL.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
#define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically-sized lookup table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature mask applied to HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

// Per-sensor capability tables, populated once per camera and shared
// across sessions.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity (read via getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
// Count of concurrently open camera sessions (guarded by gCamLock).
extern uint8_t gNumCameraSessions;
113
// Maps CDS (chroma denoise) property strings to their HAL mode values.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
119
// Android ANDROID_CONTROL_EFFECT_MODE_* <-> HAL cam_effect_mode_type mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
133
// Android ANDROID_CONTROL_AWB_MODE_* <-> HAL cam_wb_mode_type mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
147
// Android ANDROID_CONTROL_SCENE_MODE_* <-> HAL cam_scene_mode_type mapping.
// Note STEADYPHOTO maps to the HAL's ANTISHAKE scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
168
// Android ANDROID_CONTROL_AF_MODE_* <-> HAL cam_focus_mode_type mapping.
// AF_MODE_OFF intentionally appears twice: both the HAL's OFF and FIXED
// focus modes are reported to the framework as AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
180
// Android color-correction aberration mode <-> HAL cam_aberration_mode_t mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
191
// Android AE antibanding mode <-> HAL cam_antibanding_mode_type mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
200
// Derives the HAL flash mode from the Android AE mode. Plain AE_MODE_ON
// (no flash requested) maps to CAM_FLASH_MODE_OFF by design, and the
// red-eye variant uses AUTO flash like ON_AUTO_FLASH.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
210
// Android ANDROID_FLASH_MODE_* <-> HAL cam_flash_mode_t mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
218
// Android statistics face-detect mode <-> HAL cam_face_detect_mode_t mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
226
// Android lens focus-distance calibration <-> HAL cam_focus_calibration_t mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
237
// Android lens state <-> HAL cam_af_lens_state_t mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
244
// JPEG thumbnail sizes advertised to the framework, as flat (width, height)
// pairs; the leading (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Default video sizes considered for high-frame-rate recording.
const cam_dimension_t default_hfr_video_sizes[] = {
    { 3840, 2160 },
    { 1920, 1080 },
    { 1280,  720 },
    {  640,  480 },
    {  480,  320 }
};
260
261
// Android sensor test-pattern mode <-> HAL cam_test_pattern_mode_t mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
272
/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
// Android reference illuminant <-> HAL AWB illuminant mapping. Several HAL
// values appear more than once; per the note above, HAL-to-Android lookup
// takes the first match, so entry order is significant.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
298
// Requested frame rate (fps) <-> HAL high-frame-rate mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
310
// camera3_device_ops vtable handed to the camera framework. Entries left
// NULL (register_stream_buffers, get_metadata_vendor_tag_ops) are not
// implemented by this HAL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
322
323/*===========================================================================
324 * FUNCTION   : QCamera3HardwareInterface
325 *
326 * DESCRIPTION: constructor of QCamera3HardwareInterface
327 *
328 * PARAMETERS :
329 *   @cameraId  : camera ID
330 *
331 * RETURN     : none
332 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Populate the camera3_device hooks the framework will invoke.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is dereferenced without a NULL
    // check — presumably guaranteed populated by the module layer before a
    // device is constructed; confirm against the caller.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;  // default when the GPU lib is absent
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        // Query the GPU's preferred surface pixel alignment for stride padding.
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
433
434/*===========================================================================
435 * FUNCTION   : ~QCamera3HardwareInterface
436 *
437 * DESCRIPTION: destructor of QCamera3HardwareInterface
438 *
439 * PARAMETERS : none
440 *
441 * RETURN     : none
442 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    // First pass: stop every channel; deletion happens in a second pass
    // only after everything is stopped.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: destroy the channels and free their stream_info records.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // Only the pointer is cleared here — presumably mPictureChannel is owned
    // through mStreamInfo and was deleted in the loop above; confirm.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    // Close the camera only if it was ever opened successfully; closeCamera()
    // also resets mState to CLOSED.
    if (mState != CLOSED)
        closeCamera();

    // Drop all bookkeeping for requests that never completed.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
565
566/*===========================================================================
567 * FUNCTION   : erasePendingRequest
568 *
569 * DESCRIPTION: function to erase a desired pending request after freeing any
570 *              allocated memory
571 *
572 * PARAMETERS :
573 *   @i       : iterator pointing to pending request to be erased
574 *
575 * RETURN     : iterator pointing to the next request
576 *==========================================================================*/
577QCamera3HardwareInterface::pendingRequestIterator
578        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
579{
580    if (i->input_buffer != NULL) {
581        free(i->input_buffer);
582        i->input_buffer = NULL;
583    }
584    if (i->settings != NULL)
585        free_camera_metadata((camera_metadata_t*)i->settings);
586    return mPendingRequestsList.erase(i);
587}
588
589/*===========================================================================
590 * FUNCTION   : camEvtHandle
591 *
592 * DESCRIPTION: Function registered to mm-camera-interface to handle events
593 *
594 * PARAMETERS :
595 *   @camera_handle : interface layer camera handle
596 *   @evt           : ptr to event
597 *   @user_data     : user data ptr
598 *
599 * RETURN     : none
600 *==========================================================================*/
601void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
602                                          mm_camera_event_t *evt,
603                                          void *user_data)
604{
605    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
606    if (obj && evt) {
607        switch(evt->server_event_type) {
608            case CAM_EVENT_TYPE_DAEMON_DIED:
609                pthread_mutex_lock(&obj->mMutex);
610                obj->mState = ERROR;
611                pthread_mutex_unlock(&obj->mMutex);
612                LOGE("Fatal, camera daemon died");
613                break;
614
615            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
616                LOGD("HAL got request pull from Daemon");
617                pthread_mutex_lock(&obj->mMutex);
618                obj->mWokenUpByDaemon = true;
619                obj->unblockRequestIfNecessary();
620                pthread_mutex_unlock(&obj->mMutex);
621                break;
622
623            default:
624                LOGW("Warning: Unhandled event %d",
625                        evt->server_event_type);
626                break;
627        }
628    } else {
629        LOGE("NULL user_data/evt");
630    }
631}
632
633/*===========================================================================
634 * FUNCTION   : openCamera
635 *
636 * DESCRIPTION: open camera
637 *
638 * PARAMETERS :
639 *   @hw_device  : double ptr for camera device struct
640 *
641 * RETURN     : int32_t type of status
642 *              NO_ERROR  -- success
643 *              none-zero failure code
644 *==========================================================================*/
645int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
646{
647    int rc = 0;
648    if (mState != CLOSED) {
649        *hw_device = NULL;
650        return PERMISSION_DENIED;
651    }
652
653    m_perfLock.lock_acq();
654    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
655             mCameraId);
656
657    rc = openCamera();
658    if (rc == 0) {
659        *hw_device = &mCameraDevice.common;
660    } else
661        *hw_device = NULL;
662
663    m_perfLock.lock_rel();
664    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
665             mCameraId, rc);
666
667    if (rc == NO_ERROR) {
668        mState = OPENED;
669    }
670    return rc;
671}
672
673/*===========================================================================
674 * FUNCTION   : openCamera
675 *
676 * DESCRIPTION: open camera
677 *
678 * PARAMETERS : none
679 *
680 * RETURN     : int32_t type of status
681 *              NO_ERROR  -- success
682 *              none-zero failure code
683 *==========================================================================*/
684int QCamera3HardwareInterface::openCamera()
685{
686    int rc = 0;
687    char value[PROPERTY_VALUE_MAX];
688
689    KPI_ATRACE_CALL();
690    if (mCameraHandle) {
691        LOGE("Failure: Camera already opened");
692        return ALREADY_EXISTS;
693    }
694
695    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
696    if (rc < 0) {
697        LOGE("Failed to reserve flash for camera id: %d",
698                mCameraId);
699        return UNKNOWN_ERROR;
700    }
701
702    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
703    if (rc) {
704        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
705        return rc;
706    }
707
708    if (!mCameraHandle) {
709        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
710        return -ENODEV;
711    }
712
713    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
714            camEvtHandle, (void *)this);
715
716    if (rc < 0) {
717        LOGE("Error, failed to register event callback");
718        /* Not closing camera here since it is already handled in destructor */
719        return FAILED_TRANSACTION;
720    }
721
722    mExifParams.debug_params =
723            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
724    if (mExifParams.debug_params) {
725        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
726    } else {
727        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
728        return NO_MEMORY;
729    }
730    mFirstConfiguration = true;
731
732    //Notify display HAL that a camera session is active.
733    //But avoid calling the same during bootup because camera service might open/close
734    //cameras at boot time during its initialization and display service will also internally
735    //wait for camera service to initialize first while calling this display API, resulting in a
736    //deadlock situation. Since boot time camera open/close calls are made only to fetch
737    //capabilities, no need of this display bw optimization.
738    //Use "service.bootanim.exit" property to know boot status.
739    property_get("service.bootanim.exit", value, "0");
740    if (atoi(value) == 1) {
741        pthread_mutex_lock(&gCamLock);
742        if (gNumCameraSessions++ == 0) {
743            setCameraLaunchStatus(true);
744        }
745        pthread_mutex_unlock(&gCamLock);
746    }
747
748    return NO_ERROR;
749}
750
751/*===========================================================================
752 * FUNCTION   : closeCamera
753 *
754 * DESCRIPTION: close camera
755 *
756 * PARAMETERS : none
757 *
758 * RETURN     : int32_t type of status
759 *              NO_ERROR  -- success
760 *              none-zero failure code
761 *==========================================================================*/
762int QCamera3HardwareInterface::closeCamera()
763{
764    KPI_ATRACE_CALL();
765    int rc = NO_ERROR;
766    char value[PROPERTY_VALUE_MAX];
767
768    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
769             mCameraId);
770    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
771    mCameraHandle = NULL;
772
773    //Notify display HAL that there is no active camera session
774    //but avoid calling the same during bootup. Refer to openCamera
775    //for more details.
776    property_get("service.bootanim.exit", value, "0");
777    if (atoi(value) == 1) {
778        pthread_mutex_lock(&gCamLock);
779        if (--gNumCameraSessions == 0) {
780            setCameraLaunchStatus(false);
781        }
782        pthread_mutex_unlock(&gCamLock);
783    }
784
785    if (mExifParams.debug_params) {
786        free(mExifParams.debug_params);
787        mExifParams.debug_params = NULL;
788    }
789    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
790        LOGW("Failed to release flash for camera id: %d",
791                mCameraId);
792    }
793    mState = CLOSED;
794    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
795         mCameraId, rc);
796    return rc;
797}
798
799/*===========================================================================
800 * FUNCTION   : initialize
801 *
802 * DESCRIPTION: Initialize frameworks callback functions
803 *
804 * PARAMETERS :
805 *   @callback_ops : callback function to frameworks
806 *
807 * RETURN     :
808 *
809 *==========================================================================*/
810int QCamera3HardwareInterface::initialize(
811        const struct camera3_callback_ops *callback_ops)
812{
813    ATRACE_CALL();
814    int rc;
815
816    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
817    pthread_mutex_lock(&mMutex);
818
819    // Validate current state
820    switch (mState) {
821        case OPENED:
822            /* valid state */
823            break;
824
825        case ERROR:
826            pthread_mutex_unlock(&mMutex);
827            handleCameraDeviceError();
828            rc = -ENODEV;
829            goto err2;
830
831        default:
832            LOGE("Invalid state %d", mState);
833            rc = -ENODEV;
834            goto err1;
835    }
836
837    rc = initParameters();
838    if (rc < 0) {
839        LOGE("initParamters failed %d", rc);
840        goto err1;
841    }
842    mCallbackOps = callback_ops;
843
844    mChannelHandle = mCameraHandle->ops->add_channel(
845            mCameraHandle->camera_handle, NULL, NULL, this);
846    if (mChannelHandle == 0) {
847        LOGE("add_channel failed");
848        rc = -ENOMEM;
849        pthread_mutex_unlock(&mMutex);
850        return rc;
851    }
852
853    pthread_mutex_unlock(&mMutex);
854    mCameraInitialized = true;
855    mState = INITIALIZED;
856    LOGI("X");
857    return 0;
858
859err1:
860    pthread_mutex_unlock(&mMutex);
861err2:
862    return rc;
863}
864
865/*===========================================================================
866 * FUNCTION   : validateStreamDimensions
867 *
868 * DESCRIPTION: Check if the configuration requested are those advertised
869 *
870 * PARAMETERS :
871 *   @stream_list : streams to be configured
872 *
873 * RETURN     :
874 *
875 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            // At most one input stream is allowed per configuration.
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // A 90/270-degree rotation swaps width and height; validate the
        // pre-rotation (sensor-oriented) dimensions against the tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams may match the full active
            // array size directly.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    // NOTE: this break leaves the switch entirely, which
                    // intentionally skips the picture-size table scan below.
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Otherwise fall back to the generated picture sizes table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
987
988/*==============================================================================
989 * FUNCTION   : isSupportChannelNeeded
990 *
991 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
992 *
993 * PARAMETERS :
994 *   @stream_list : streams to be configured
995 *   @stream_config_info : the config info for streams to be configured
996 *
997 * RETURN     : Boolen true/false decision
998 *
999 *==========================================================================*/
1000bool QCamera3HardwareInterface::isSupportChannelNeeded(
1001        camera3_stream_configuration_t *streamList,
1002        cam_stream_size_info_t stream_config_info)
1003{
1004    uint32_t i;
1005    bool pprocRequested = false;
1006    /* Check for conditions where PProc pipeline does not have any streams*/
1007    for (i = 0; i < stream_config_info.num_streams; i++) {
1008        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1009                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1010            pprocRequested = true;
1011            break;
1012        }
1013    }
1014
1015    if (pprocRequested == false )
1016        return true;
1017
1018    /* Dummy stream needed if only raw or jpeg streams present */
1019    for (i = 0; i < streamList->num_streams; i++) {
1020        switch(streamList->streams[i]->format) {
1021            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1022            case HAL_PIXEL_FORMAT_RAW10:
1023            case HAL_PIXEL_FORMAT_RAW16:
1024            case HAL_PIXEL_FORMAT_BLOB:
1025                break;
1026            default:
1027                return false;
1028        }
1029    }
1030    return true;
1031}
1032
1033/*==============================================================================
1034 * FUNCTION   : getSensorOutputSize
1035 *
1036 * DESCRIPTION: Get sensor output size based on current stream configuratoin
1037 *
1038 * PARAMETERS :
1039 *   @sensor_dim : sensor output dimension (output)
1040 *
1041 * RETURN     : int32_t type of status
1042 *              NO_ERROR  -- success
1043 *              none-zero failure code
1044 *
1045 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // Derive the largest width and height independently across all
    // configured streams; this bounding box drives the sensor mode choice.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // The parameter buffer is reused: clear it before each set/get batch.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Push the max dimension to the backend so it can pick a sensor mode.
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Now query back the raw dimension the backend selected for that mode.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);

    return rc;
}
1088
1089/*==============================================================================
1090 * FUNCTION   : enablePowerHint
1091 *
1092 * DESCRIPTION: enable single powerhint for preview and different video modes.
1093 *
1094 * PARAMETERS :
1095 *
1096 * RETURN     : NULL
1097 *
1098 *==========================================================================*/
1099void QCamera3HardwareInterface::enablePowerHint()
1100{
1101    if (!mPowerHintEnabled) {
1102        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1103        mPowerHintEnabled = true;
1104    }
1105}
1106
1107/*==============================================================================
1108 * FUNCTION   : disablePowerHint
1109 *
1110 * DESCRIPTION: disable current powerhint.
1111 *
1112 * PARAMETERS :
1113 *
1114 * RETURN     : NULL
1115 *
1116 *==========================================================================*/
1117void QCamera3HardwareInterface::disablePowerHint()
1118{
1119    if (mPowerHintEnabled) {
1120        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1121        mPowerHintEnabled = false;
1122    }
1123}
1124
1125/*==============================================================================
1126 * FUNCTION   : addToPPFeatureMask
1127 *
1128 * DESCRIPTION: add additional features to pp feature mask based on
1129 *              stream type and usecase
1130 *
1131 * PARAMETERS :
1132 *   @stream_format : stream type for feature mask
1133 *   @stream_idx : stream idx within postprocess_mask list to change
1134 *
1135 * RETURN     : NULL
1136 *
1137 *==========================================================================*/
1138void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1139        uint32_t stream_idx)
1140{
1141    char feature_mask_value[PROPERTY_VALUE_MAX];
1142    uint32_t feature_mask;
1143    int args_converted;
1144    int property_len;
1145
1146    /* Get feature mask from property */
1147    property_len = property_get("persist.camera.hal3.feature",
1148            feature_mask_value, "0");
1149    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1150            (feature_mask_value[1] == 'x')) {
1151        args_converted = sscanf(feature_mask_value, "0x%x", &feature_mask);
1152    } else {
1153        args_converted = sscanf(feature_mask_value, "%d", &feature_mask);
1154    }
1155    if (1 != args_converted) {
1156        feature_mask = 0;
1157        LOGE("Wrong feature mask %s", feature_mask_value);
1158        return;
1159    }
1160
1161    switch (stream_format) {
1162    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1163        /* Add LLVD to pp feature mask only if video hint is enabled */
1164        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1165            mStreamConfigInfo.postprocess_mask[stream_idx]
1166                    |= CAM_QTI_FEATURE_SW_TNR;
1167            LOGH("Added SW TNR to pp feature mask");
1168        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1169            mStreamConfigInfo.postprocess_mask[stream_idx]
1170                    |= CAM_QCOM_FEATURE_LLVD;
1171            LOGH("Added LLVD SeeMore to pp feature mask");
1172        }
1173        break;
1174    }
1175    default:
1176        break;
1177    }
1178    LOGD("PP feature mask %x",
1179            mStreamConfigInfo.postprocess_mask[stream_idx]);
1180}
1181
1182/*==============================================================================
1183 * FUNCTION   : updateFpsInPreviewBuffer
1184 *
1185 * DESCRIPTION: update FPS information in preview buffer.
1186 *
1187 * PARAMETERS :
1188 *   @metadata    : pointer to metadata buffer
1189 *   @frame_number: frame_number to look for in pending buffer list
1190 *
1191 * RETURN     : None
1192 *
1193 *==========================================================================*/
1194void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1195        uint32_t frame_number)
1196{
1197    // Mark all pending buffers for this particular request
1198    // with corresponding framerate information
1199    for (List<PendingBuffersInRequest>::iterator req =
1200            mPendingBuffersMap.mPendingBuffersInRequest.begin();
1201            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1202        for(List<PendingBufferInfo>::iterator j =
1203                req->mPendingBufferList.begin();
1204                j != req->mPendingBufferList.end(); j++) {
1205            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1206            if ((req->frame_number == frame_number) &&
1207                (channel->getStreamTypeMask() &
1208                (1U << CAM_STREAM_TYPE_PREVIEW))) {
1209                IF_META_AVAILABLE(cam_fps_range_t, float_range,
1210                    CAM_INTF_PARM_FPS_RANGE, metadata) {
1211                    int32_t cameraFps = float_range->max_fps;
1212                    struct private_handle_t *priv_handle =
1213                        (struct private_handle_t *)(*(j->buffer));
1214                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1215                }
1216            }
1217        }
1218    }
1219}
1220
1221/*===========================================================================
1222 * FUNCTION   : configureStreams
1223 *
1224 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1225 *              and output streams.
1226 *
1227 * PARAMETERS :
1228 *   @stream_list : streams to be configured
1229 *
1230 * RETURN     :
1231 *
1232 *==========================================================================*/
1233int QCamera3HardwareInterface::configureStreams(
1234        camera3_stream_configuration_t *streamList)
1235{
1236    ATRACE_CALL();
1237    int rc = 0;
1238
1239    // Acquire perfLock before configure streams
1240    m_perfLock.lock_acq();
1241    rc = configureStreamsPerfLocked(streamList);
1242    m_perfLock.lock_rel();
1243
1244    return rc;
1245}
1246
1247/*===========================================================================
1248 * FUNCTION   : configureStreamsPerfLocked
1249 *
1250 * DESCRIPTION: configureStreams while perfLock is held.
1251 *
1252 * PARAMETERS :
1253 *   @stream_list : streams to be configured
1254 *
1255 * RETURN     : int32_t type of status
1256 *              NO_ERROR  -- success
1257 *              none-zero failure code
1258 *==========================================================================*/
1259int QCamera3HardwareInterface::configureStreamsPerfLocked(
1260        camera3_stream_configuration_t *streamList)
1261{
1262    ATRACE_CALL();
1263    int rc = 0;
1264
1265    // Sanity check stream_list
1266    if (streamList == NULL) {
1267        LOGE("NULL stream configuration");
1268        return BAD_VALUE;
1269    }
1270    if (streamList->streams == NULL) {
1271        LOGE("NULL stream list");
1272        return BAD_VALUE;
1273    }
1274
1275    if (streamList->num_streams < 1) {
1276        LOGE("Bad number of streams requested: %d",
1277                streamList->num_streams);
1278        return BAD_VALUE;
1279    }
1280
1281    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1282        LOGE("Maximum number of streams %d exceeded: %d",
1283                MAX_NUM_STREAMS, streamList->num_streams);
1284        return BAD_VALUE;
1285    }
1286
1287    mOpMode = streamList->operation_mode;
1288    LOGD("mOpMode: %d", mOpMode);
1289
1290    /* first invalidate all the steams in the mStreamList
1291     * if they appear again, they will be validated */
1292    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1293            it != mStreamInfo.end(); it++) {
1294        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1295        channel->stop();
1296        (*it)->status = INVALID;
1297    }
1298
1299    if (mRawDumpChannel) {
1300        mRawDumpChannel->stop();
1301        delete mRawDumpChannel;
1302        mRawDumpChannel = NULL;
1303    }
1304
1305    if (mSupportChannel)
1306        mSupportChannel->stop();
1307
1308    if (mAnalysisChannel) {
1309        mAnalysisChannel->stop();
1310    }
1311    if (mMetadataChannel) {
1312        /* If content of mStreamInfo is not 0, there is metadata stream */
1313        mMetadataChannel->stop();
1314    }
1315    if (mChannelHandle) {
1316        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1317                mChannelHandle);
1318        LOGD("stopping channel %d", mChannelHandle);
1319    }
1320
1321    pthread_mutex_lock(&mMutex);
1322
1323    // Check state
1324    switch (mState) {
1325        case INITIALIZED:
1326        case CONFIGURED:
1327        case STARTED:
1328            /* valid state */
1329            break;
1330
1331        case ERROR:
1332            pthread_mutex_unlock(&mMutex);
1333            handleCameraDeviceError();
1334            return -ENODEV;
1335
1336        default:
1337            LOGE("Invalid state %d", mState);
1338            pthread_mutex_unlock(&mMutex);
1339            return -ENODEV;
1340    }
1341
1342    /* Check whether we have video stream */
1343    m_bIs4KVideo = false;
1344    m_bIsVideo = false;
1345    m_bEisSupportedSize = false;
1346    m_bTnrEnabled = false;
1347    bool isZsl = false;
1348    uint32_t videoWidth = 0U;
1349    uint32_t videoHeight = 0U;
1350    size_t rawStreamCnt = 0;
1351    size_t stallStreamCnt = 0;
1352    size_t processedStreamCnt = 0;
1353    // Number of streams on ISP encoder path
1354    size_t numStreamsOnEncoder = 0;
1355    size_t numYuv888OnEncoder = 0;
1356    bool bYuv888OverrideJpeg = false;
1357    cam_dimension_t largeYuv888Size = {0, 0};
1358    cam_dimension_t maxViewfinderSize = {0, 0};
1359    bool bJpegExceeds4K = false;
1360    bool bUseCommonFeatureMask = false;
1361    uint32_t commonFeatureMask = 0;
1362    bool bSmallJpegSize = false;
1363    uint32_t width_ratio;
1364    uint32_t height_ratio;
1365    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1366    camera3_stream_t *inputStream = NULL;
1367    bool isJpeg = false;
1368    cam_dimension_t jpegSize = {0, 0};
1369
1370    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1371
1372    /*EIS configuration*/
1373    bool eisSupported = false;
1374    bool oisSupported = false;
1375    int32_t margin_index = -1;
1376    uint8_t eis_prop_set;
1377    uint32_t maxEisWidth = 0;
1378    uint32_t maxEisHeight = 0;
1379
1380    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1381
1382    size_t count = IS_TYPE_MAX;
1383    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1384    for (size_t i = 0; i < count; i++) {
1385        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1386            eisSupported = true;
1387            margin_index = (int32_t)i;
1388            break;
1389        }
1390    }
1391
1392    count = CAM_OPT_STAB_MAX;
1393    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1394    for (size_t i = 0; i < count; i++) {
1395        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1396            oisSupported = true;
1397            break;
1398        }
1399    }
1400
1401    if (eisSupported) {
1402        maxEisWidth = MAX_EIS_WIDTH;
1403        maxEisHeight = MAX_EIS_HEIGHT;
1404    }
1405
1406    /* EIS setprop control */
1407    char eis_prop[PROPERTY_VALUE_MAX];
1408    memset(eis_prop, 0, sizeof(eis_prop));
1409    property_get("persist.camera.eis.enable", eis_prop, "0");
1410    eis_prop_set = (uint8_t)atoi(eis_prop);
1411
1412    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1413            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1414
1415    /* stream configurations */
1416    for (size_t i = 0; i < streamList->num_streams; i++) {
1417        camera3_stream_t *newStream = streamList->streams[i];
1418        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1419                "height = %d, rotation = %d, usage = 0x%x",
1420                 i, newStream->stream_type, newStream->format,
1421                newStream->width, newStream->height, newStream->rotation,
1422                newStream->usage);
1423        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1424                newStream->stream_type == CAMERA3_STREAM_INPUT){
1425            isZsl = true;
1426        }
1427        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1428            inputStream = newStream;
1429        }
1430
1431        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1432            isJpeg = true;
1433            jpegSize.width = newStream->width;
1434            jpegSize.height = newStream->height;
1435            if (newStream->width > VIDEO_4K_WIDTH ||
1436                    newStream->height > VIDEO_4K_HEIGHT)
1437                bJpegExceeds4K = true;
1438        }
1439
1440        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1441                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1442            m_bIsVideo = true;
1443            videoWidth = newStream->width;
1444            videoHeight = newStream->height;
1445            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1446                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1447                m_bIs4KVideo = true;
1448            }
1449            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1450                                  (newStream->height <= maxEisHeight);
1451        }
1452        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1453                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1454            switch (newStream->format) {
1455            case HAL_PIXEL_FORMAT_BLOB:
1456                stallStreamCnt++;
1457                if (isOnEncoder(maxViewfinderSize, newStream->width,
1458                        newStream->height)) {
1459                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1460                    numStreamsOnEncoder++;
1461                }
1462                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1463                        newStream->width);
1464                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1465                        newStream->height);;
1466                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1467                        "FATAL: max_downscale_factor cannot be zero and so assert");
1468                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1469                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1470                    LOGH("Setting small jpeg size flag to true");
1471                    bSmallJpegSize = true;
1472                }
1473                break;
1474            case HAL_PIXEL_FORMAT_RAW10:
1475            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1476            case HAL_PIXEL_FORMAT_RAW16:
1477                rawStreamCnt++;
1478                break;
1479            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1480                processedStreamCnt++;
1481                if (isOnEncoder(maxViewfinderSize, newStream->width,
1482                        newStream->height)) {
1483                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1484                            IS_USAGE_ZSL(newStream->usage)) {
1485                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1486                    } else {
1487                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1488                    }
1489                    numStreamsOnEncoder++;
1490                }
1491                break;
1492            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1493                processedStreamCnt++;
1494                if (isOnEncoder(maxViewfinderSize, newStream->width,
1495                        newStream->height)) {
1496                    // If Yuv888 size is not greater than 4K, set feature mask
1497                    // to SUPERSET so that it support concurrent request on
1498                    // YUV and JPEG.
1499                    if (newStream->width <= VIDEO_4K_WIDTH &&
1500                            newStream->height <= VIDEO_4K_HEIGHT) {
1501                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1502                    } else {
1503                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1504                    }
1505                    numStreamsOnEncoder++;
1506                    numYuv888OnEncoder++;
1507                    largeYuv888Size.width = newStream->width;
1508                    largeYuv888Size.height = newStream->height;
1509                }
1510                break;
1511            default:
1512                processedStreamCnt++;
1513                if (isOnEncoder(maxViewfinderSize, newStream->width,
1514                        newStream->height)) {
1515                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1516                    numStreamsOnEncoder++;
1517                }
1518                break;
1519            }
1520
1521        }
1522    }
1523
1524    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1525        !m_bIsVideo) {
1526        m_bEisEnable = false;
1527    }
1528
1529    /* Logic to enable/disable TNR based on specific config size/etc.*/
1530    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1531            ((videoWidth == 1920 && videoHeight == 1080) ||
1532            (videoWidth == 1280 && videoHeight == 720)) &&
1533            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1534        m_bTnrEnabled = true;
1535
1536    /* Check if num_streams is sane */
1537    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1538            rawStreamCnt > MAX_RAW_STREAMS ||
1539            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1540        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1541                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1542        pthread_mutex_unlock(&mMutex);
1543        return -EINVAL;
1544    }
1545    /* Check whether we have zsl stream or 4k video case */
1546    if (isZsl && m_bIsVideo) {
1547        LOGE("Currently invalid configuration ZSL&Video!");
1548        pthread_mutex_unlock(&mMutex);
1549        return -EINVAL;
1550    }
1551    /* Check if stream sizes are sane */
1552    if (numStreamsOnEncoder > 2) {
1553        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1554        pthread_mutex_unlock(&mMutex);
1555        return -EINVAL;
1556    } else if (1 < numStreamsOnEncoder){
1557        bUseCommonFeatureMask = true;
1558        LOGH("Multiple streams above max viewfinder size, common mask needed");
1559    }
1560
1561    /* Check if BLOB size is greater than 4k in 4k recording case */
1562    if (m_bIs4KVideo && bJpegExceeds4K) {
1563        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1564        pthread_mutex_unlock(&mMutex);
1565        return -EINVAL;
1566    }
1567
1568    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1569    // the YUV stream's size is greater or equal to the JPEG size, set common
1570    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1571    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1572            jpegSize.width, jpegSize.height) &&
1573            largeYuv888Size.width > jpegSize.width &&
1574            largeYuv888Size.height > jpegSize.height) {
1575        bYuv888OverrideJpeg = true;
1576    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1577        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1578    }
1579
1580    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %x",
1581            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1582            commonFeatureMask);
1583    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1584            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1585
1586    rc = validateStreamDimensions(streamList);
1587    if (rc == NO_ERROR) {
1588        rc = validateStreamRotations(streamList);
1589    }
1590    if (rc != NO_ERROR) {
1591        LOGE("Invalid stream configuration requested!");
1592        pthread_mutex_unlock(&mMutex);
1593        return rc;
1594    }
1595
1596    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1597    camera3_stream_t *jpegStream = NULL;
1598    for (size_t i = 0; i < streamList->num_streams; i++) {
1599        camera3_stream_t *newStream = streamList->streams[i];
1600        LOGH("newStream type = %d, stream format = %d "
1601                "stream size : %d x %d, stream rotation = %d",
1602                 newStream->stream_type, newStream->format,
1603                newStream->width, newStream->height, newStream->rotation);
1604        //if the stream is in the mStreamList validate it
1605        bool stream_exists = false;
1606        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1607                it != mStreamInfo.end(); it++) {
1608            if ((*it)->stream == newStream) {
1609                QCamera3ProcessingChannel *channel =
1610                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1611                stream_exists = true;
1612                if (channel)
1613                    delete channel;
1614                (*it)->status = VALID;
1615                (*it)->stream->priv = NULL;
1616                (*it)->channel = NULL;
1617            }
1618        }
1619        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1620            //new stream
1621            stream_info_t* stream_info;
1622            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1623            if (!stream_info) {
1624               LOGE("Could not allocate stream info");
1625               rc = -ENOMEM;
1626               pthread_mutex_unlock(&mMutex);
1627               return rc;
1628            }
1629            stream_info->stream = newStream;
1630            stream_info->status = VALID;
1631            stream_info->channel = NULL;
1632            mStreamInfo.push_back(stream_info);
1633        }
1634        /* Covers Opaque ZSL and API1 F/W ZSL */
1635        if (IS_USAGE_ZSL(newStream->usage)
1636                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1637            if (zslStream != NULL) {
1638                LOGE("Multiple input/reprocess streams requested!");
1639                pthread_mutex_unlock(&mMutex);
1640                return BAD_VALUE;
1641            }
1642            zslStream = newStream;
1643        }
1644        /* Covers YUV reprocess */
1645        if (inputStream != NULL) {
1646            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1647                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1648                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1649                    && inputStream->width == newStream->width
1650                    && inputStream->height == newStream->height) {
1651                if (zslStream != NULL) {
1652                    /* This scenario indicates that multiple YUV streams with the
1653                     * same size as the input stream have been requested. Since the
1654                     * zsl stream handle is solely used for overriding the size of
1655                     * streams which share h/w streams, we will just make a guess here
1656                     * as to which of the streams is a ZSL stream. This will be
1657                     * refactored once we make generic logic for streams sharing
1658                     * encoder output. */
1659                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1660                }
1661                zslStream = newStream;
1662            }
1663        }
1664        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1665            jpegStream = newStream;
1666        }
1667    }
1668
1669    /* If a zsl stream is set, we know that we have configured at least one input or
1670       bidirectional stream */
1671    if (NULL != zslStream) {
1672        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1673        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1674        mInputStreamInfo.format = zslStream->format;
1675        mInputStreamInfo.usage = zslStream->usage;
1676        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1677                 mInputStreamInfo.dim.width,
1678                mInputStreamInfo.dim.height,
1679                mInputStreamInfo.format, mInputStreamInfo.usage);
1680    }
1681
1682    cleanAndSortStreamInfo();
1683    if (mMetadataChannel) {
1684        delete mMetadataChannel;
1685        mMetadataChannel = NULL;
1686    }
1687    if (mSupportChannel) {
1688        delete mSupportChannel;
1689        mSupportChannel = NULL;
1690    }
1691
1692    if (mAnalysisChannel) {
1693        delete mAnalysisChannel;
1694        mAnalysisChannel = NULL;
1695    }
1696
1697    if (mDummyBatchChannel) {
1698        delete mDummyBatchChannel;
1699        mDummyBatchChannel = NULL;
1700    }
1701
1702    //Create metadata channel and initialize it
1703    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1704                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1705                    &padding_info, CAM_QCOM_FEATURE_NONE, this);
1706    if (mMetadataChannel == NULL) {
1707        LOGE("failed to allocate metadata channel");
1708        rc = -ENOMEM;
1709        pthread_mutex_unlock(&mMutex);
1710        return rc;
1711    }
1712    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1713    if (rc < 0) {
1714        LOGE("metadata channel initialization failed");
1715        delete mMetadataChannel;
1716        mMetadataChannel = NULL;
1717        pthread_mutex_unlock(&mMutex);
1718        return rc;
1719    }
1720
1721    // Create analysis stream all the time, even when h/w support is not available
1722    {
1723        mAnalysisChannel = new QCamera3SupportChannel(
1724                mCameraHandle->camera_handle,
1725                mChannelHandle,
1726                mCameraHandle->ops,
1727                &gCamCapability[mCameraId]->analysis_padding_info,
1728                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1729                CAM_STREAM_TYPE_ANALYSIS,
1730                &gCamCapability[mCameraId]->analysis_recommended_res,
1731                (gCamCapability[mCameraId]->analysis_recommended_format
1732                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1733                : CAM_FORMAT_YUV_420_NV21),
1734                gCamCapability[mCameraId]->hw_analysis_supported,
1735                this,
1736                0); // force buffer count to 0
1737        if (!mAnalysisChannel) {
1738            LOGE("H/W Analysis channel cannot be created");
1739            pthread_mutex_unlock(&mMutex);
1740            return -ENOMEM;
1741        }
1742    }
1743
1744    bool isRawStreamRequested = false;
1745    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1746    /* Allocate channel objects for the requested streams */
1747    for (size_t i = 0; i < streamList->num_streams; i++) {
1748        camera3_stream_t *newStream = streamList->streams[i];
1749        uint32_t stream_usage = newStream->usage;
1750        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1751        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1752        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1753                || IS_USAGE_ZSL(newStream->usage)) &&
1754            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1755            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1756            if (bUseCommonFeatureMask) {
1757                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1758                        commonFeatureMask;
1759            } else {
1760                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1761                        CAM_QCOM_FEATURE_NONE;
1762            }
1763
1764        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1765                LOGH("Input stream configured, reprocess config");
1766        } else {
1767            //for non zsl streams find out the format
1768            switch (newStream->format) {
1769            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1770            {
1771                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1772                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1773                /* add additional features to pp feature mask */
1774                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1775                        mStreamConfigInfo.num_streams);
1776
1777                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1778                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1779                                CAM_STREAM_TYPE_VIDEO;
1780                    if (m_bTnrEnabled && m_bTnrVideo) {
1781                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1782                            CAM_QCOM_FEATURE_CPP_TNR;
1783                    }
1784                } else {
1785                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1786                            CAM_STREAM_TYPE_PREVIEW;
1787                    if (m_bTnrEnabled && m_bTnrPreview) {
1788                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1789                                CAM_QCOM_FEATURE_CPP_TNR;
1790                    }
1791                    padding_info.width_padding = mSurfaceStridePadding;
1792                    padding_info.height_padding = CAM_PAD_TO_2;
1793                }
1794                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1795                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1796                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1797                            newStream->height;
1798                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1799                            newStream->width;
1800                }
1801            }
1802            break;
1803            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1804                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1805                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1806                    if (bUseCommonFeatureMask)
1807                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1808                                commonFeatureMask;
1809                    else
1810                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1811                                CAM_QCOM_FEATURE_NONE;
1812                } else {
1813                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1814                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1815                }
1816            break;
1817            case HAL_PIXEL_FORMAT_BLOB:
1818                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1819                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1820                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1821                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1822                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1823                } else {
1824                    if (bUseCommonFeatureMask &&
1825                            isOnEncoder(maxViewfinderSize, newStream->width,
1826                            newStream->height)) {
1827                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1828                    } else {
1829                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1830                    }
1831                }
1832                if (isZsl) {
1833                    if (zslStream) {
1834                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1835                                (int32_t)zslStream->width;
1836                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1837                                (int32_t)zslStream->height;
1838                    } else {
1839                        LOGE("Error, No ZSL stream identified");
1840                        pthread_mutex_unlock(&mMutex);
1841                        return -EINVAL;
1842                    }
1843                } else if (m_bIs4KVideo) {
1844                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1845                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1846                } else if (bYuv888OverrideJpeg) {
1847                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1848                            (int32_t)largeYuv888Size.width;
1849                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1850                            (int32_t)largeYuv888Size.height;
1851                }
1852                break;
1853            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1854            case HAL_PIXEL_FORMAT_RAW16:
1855            case HAL_PIXEL_FORMAT_RAW10:
1856                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1857                isRawStreamRequested = true;
1858                break;
1859            default:
1860                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1861                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1862                break;
1863            }
1864        }
1865
1866        if (newStream->priv == NULL) {
1867            //New stream, construct channel
1868            switch (newStream->stream_type) {
1869            case CAMERA3_STREAM_INPUT:
1870                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1871                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1872                break;
1873            case CAMERA3_STREAM_BIDIRECTIONAL:
1874                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1875                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1876                break;
1877            case CAMERA3_STREAM_OUTPUT:
1878                /* For video encoding stream, set read/write rarely
1879                 * flag so that they may be set to un-cached */
1880                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1881                    newStream->usage |=
1882                         (GRALLOC_USAGE_SW_READ_RARELY |
1883                         GRALLOC_USAGE_SW_WRITE_RARELY |
1884                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1885                else if (IS_USAGE_ZSL(newStream->usage))
1886                {
1887                    LOGD("ZSL usage flag skipping");
1888                }
1889                else if (newStream == zslStream
1890                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1891                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1892                } else
1893                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1894                break;
1895            default:
1896                LOGE("Invalid stream_type %d", newStream->stream_type);
1897                break;
1898            }
1899
1900            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1901                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1902                QCamera3ProcessingChannel *channel = NULL;
1903                switch (newStream->format) {
1904                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1905                    if ((newStream->usage &
1906                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1907                            (streamList->operation_mode ==
1908                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1909                    ) {
1910                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1911                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1912                                &gCamCapability[mCameraId]->padding_info,
1913                                this,
1914                                newStream,
1915                                (cam_stream_type_t)
1916                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1917                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1918                                mMetadataChannel,
1919                                0); //heap buffers are not required for HFR video channel
1920                        if (channel == NULL) {
1921                            LOGE("allocation of channel failed");
1922                            pthread_mutex_unlock(&mMutex);
1923                            return -ENOMEM;
1924                        }
1925                        //channel->getNumBuffers() will return 0 here so use
1926                        //MAX_INFLIGHT_HFR_REQUESTS
1927                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1928                        newStream->priv = channel;
1929                        LOGI("num video buffers in HFR mode: %d",
1930                                 MAX_INFLIGHT_HFR_REQUESTS);
1931                    } else {
1932                        /* Copy stream contents in HFR preview only case to create
1933                         * dummy batch channel so that sensor streaming is in
1934                         * HFR mode */
1935                        if (!m_bIsVideo && (streamList->operation_mode ==
1936                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1937                            mDummyBatchStream = *newStream;
1938                        }
1939                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1940                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1941                                &gCamCapability[mCameraId]->padding_info,
1942                                this,
1943                                newStream,
1944                                (cam_stream_type_t)
1945                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1946                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1947                                mMetadataChannel,
1948                                MAX_INFLIGHT_REQUESTS);
1949                        if (channel == NULL) {
1950                            LOGE("allocation of channel failed");
1951                            pthread_mutex_unlock(&mMutex);
1952                            return -ENOMEM;
1953                        }
1954                        newStream->max_buffers = channel->getNumBuffers();
1955                        newStream->priv = channel;
1956                    }
1957                    break;
1958                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1959                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1960                            mChannelHandle,
1961                            mCameraHandle->ops, captureResultCb,
1962                            &padding_info,
1963                            this,
1964                            newStream,
1965                            (cam_stream_type_t)
1966                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1967                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1968                            mMetadataChannel);
1969                    if (channel == NULL) {
1970                        LOGE("allocation of YUV channel failed");
1971                        pthread_mutex_unlock(&mMutex);
1972                        return -ENOMEM;
1973                    }
1974                    newStream->max_buffers = channel->getNumBuffers();
1975                    newStream->priv = channel;
1976                    break;
1977                }
1978                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1979                case HAL_PIXEL_FORMAT_RAW16:
1980                case HAL_PIXEL_FORMAT_RAW10:
1981                    mRawChannel = new QCamera3RawChannel(
1982                            mCameraHandle->camera_handle, mChannelHandle,
1983                            mCameraHandle->ops, captureResultCb,
1984                            &padding_info,
1985                            this, newStream, CAM_QCOM_FEATURE_NONE,
1986                            mMetadataChannel,
1987                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1988                    if (mRawChannel == NULL) {
1989                        LOGE("allocation of raw channel failed");
1990                        pthread_mutex_unlock(&mMutex);
1991                        return -ENOMEM;
1992                    }
1993                    newStream->max_buffers = mRawChannel->getNumBuffers();
1994                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1995                    break;
1996                case HAL_PIXEL_FORMAT_BLOB:
1997                    // Max live snapshot inflight buffer is 1. This is to mitigate
1998                    // frame drop issues for video snapshot. The more buffers being
1999                    // allocated, the more frame drops there are.
2000                    mPictureChannel = new QCamera3PicChannel(
2001                            mCameraHandle->camera_handle, mChannelHandle,
2002                            mCameraHandle->ops, captureResultCb,
2003                            &padding_info, this, newStream,
2004                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2005                            m_bIs4KVideo, isZsl, mMetadataChannel,
2006                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2007                    if (mPictureChannel == NULL) {
2008                        LOGE("allocation of channel failed");
2009                        pthread_mutex_unlock(&mMutex);
2010                        return -ENOMEM;
2011                    }
2012                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2013                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2014                    mPictureChannel->overrideYuvSize(
2015                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2016                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2017                    break;
2018
2019                default:
2020                    LOGE("not a supported format 0x%x", newStream->format);
2021                    break;
2022                }
2023            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2024                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2025            } else {
2026                LOGE("Error, Unknown stream type");
2027                pthread_mutex_unlock(&mMutex);
2028                return -EINVAL;
2029            }
2030
2031            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2032            if (channel != NULL && channel->isUBWCEnabled()) {
2033                cam_format_t fmt = channel->getStreamDefaultFormat(
2034                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2035                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2036                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2037                }
2038            }
2039
2040            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2041                    it != mStreamInfo.end(); it++) {
2042                if ((*it)->stream == newStream) {
2043                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2044                    break;
2045                }
2046            }
2047        } else {
2048            // Channel already exists for this stream
2049            // Do nothing for now
2050        }
2051        padding_info = gCamCapability[mCameraId]->padding_info;
2052
2053        /* Do not add entries for input stream in metastream info
2054         * since there is no real stream associated with it
2055         */
2056        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2057            mStreamConfigInfo.num_streams++;
2058    }
2059
2060    //RAW DUMP channel
2061    if (mEnableRawDump && isRawStreamRequested == false){
2062        cam_dimension_t rawDumpSize;
2063        rawDumpSize = getMaxRawSize(mCameraId);
2064        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2065                                  mChannelHandle,
2066                                  mCameraHandle->ops,
2067                                  rawDumpSize,
2068                                  &padding_info,
2069                                  this, CAM_QCOM_FEATURE_NONE);
2070        if (!mRawDumpChannel) {
2071            LOGE("Raw Dump channel cannot be created");
2072            pthread_mutex_unlock(&mMutex);
2073            return -ENOMEM;
2074        }
2075    }
2076
2077
2078    if (mAnalysisChannel) {
2079        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2080                gCamCapability[mCameraId]->analysis_recommended_res;
2081        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2082                CAM_STREAM_TYPE_ANALYSIS;
2083        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2084                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2085        mStreamConfigInfo.num_streams++;
2086    }
2087
2088    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2089        mSupportChannel = new QCamera3SupportChannel(
2090                mCameraHandle->camera_handle,
2091                mChannelHandle,
2092                mCameraHandle->ops,
2093                &gCamCapability[mCameraId]->padding_info,
2094                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2095                CAM_STREAM_TYPE_CALLBACK,
2096                &QCamera3SupportChannel::kDim,
2097                CAM_FORMAT_YUV_420_NV21,
2098                gCamCapability[mCameraId]->hw_analysis_supported,
2099                this);
2100        if (!mSupportChannel) {
2101            LOGE("dummy channel cannot be created");
2102            pthread_mutex_unlock(&mMutex);
2103            return -ENOMEM;
2104        }
2105    }
2106
2107    if (mSupportChannel) {
2108        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2109                QCamera3SupportChannel::kDim;
2110        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2111                CAM_STREAM_TYPE_CALLBACK;
2112        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2113                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2114        mStreamConfigInfo.num_streams++;
2115    }
2116
2117    if (mRawDumpChannel) {
2118        cam_dimension_t rawSize;
2119        rawSize = getMaxRawSize(mCameraId);
2120        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2121                rawSize;
2122        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2123                CAM_STREAM_TYPE_RAW;
2124        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2125                CAM_QCOM_FEATURE_NONE;
2126        mStreamConfigInfo.num_streams++;
2127    }
2128    /* In HFR mode, if video stream is not added, create a dummy channel so that
2129     * ISP can create a batch mode even for preview only case. This channel is
2130     * never 'start'ed (no stream-on), it is only 'initialized'  */
2131    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2132            !m_bIsVideo) {
2133        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2134                mChannelHandle,
2135                mCameraHandle->ops, captureResultCb,
2136                &gCamCapability[mCameraId]->padding_info,
2137                this,
2138                &mDummyBatchStream,
2139                CAM_STREAM_TYPE_VIDEO,
2140                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2141                mMetadataChannel);
2142        if (NULL == mDummyBatchChannel) {
2143            LOGE("creation of mDummyBatchChannel failed."
2144                    "Preview will use non-hfr sensor mode ");
2145        }
2146    }
2147    if (mDummyBatchChannel) {
2148        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2149                mDummyBatchStream.width;
2150        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2151                mDummyBatchStream.height;
2152        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2153                CAM_STREAM_TYPE_VIDEO;
2154        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2155                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2156        mStreamConfigInfo.num_streams++;
2157    }
2158
2159    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2160    mStreamConfigInfo.buffer_info.max_buffers =
2161            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2162
2163    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2164    for (pendingRequestIterator i = mPendingRequestsList.begin();
2165            i != mPendingRequestsList.end();) {
2166        i = erasePendingRequest(i);
2167    }
2168    mPendingFrameDropList.clear();
2169    // Initialize/Reset the pending buffers list
2170    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2171        req.mPendingBufferList.clear();
2172    }
2173    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2174
2175    mPendingReprocessResultList.clear();
2176
2177    mCurJpegMeta.clear();
2178    //Get min frame duration for this streams configuration
2179    deriveMinFrameDuration();
2180
2181    // Update state
2182    mState = CONFIGURED;
2183
2184    pthread_mutex_unlock(&mMutex);
2185
2186    return rc;
2187}
2188
2189/*===========================================================================
2190 * FUNCTION   : validateCaptureRequest
2191 *
2192 * DESCRIPTION: validate a capture request from camera service
2193 *
2194 * PARAMETERS :
2195 *   @request : request from framework to process
2196 *
2197 * RETURN     : 0 on success; BAD_VALUE if the request is malformed
2198 *
2199 *==========================================================================*/
2200int QCamera3HardwareInterface::validateCaptureRequest(
2201                    camera3_capture_request_t *request)
2202{
2203    ssize_t idx = 0;
2204    const camera3_stream_buffer_t *b;
2205    CameraMetadata meta;
2206
2207    /* Sanity check the request */
2208    if (request == NULL) {
2209        LOGE("NULL capture request");
2210        return BAD_VALUE;
2211    }
2212
2213    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2214        /*settings cannot be null for the first request*/
2215        return BAD_VALUE;
2216    }
2217
2218    uint32_t frameNumber = request->frame_number;
2219    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2220        LOGE("Request %d: No output buffers provided!",
2221                __FUNCTION__, frameNumber);
2222        return BAD_VALUE;
2223    }
2224    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2225        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2226                 request->num_output_buffers, MAX_NUM_STREAMS);
2227        return BAD_VALUE;
2228    }
2229    if (request->input_buffer != NULL) {
2230        b = request->input_buffer;
2231        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2232            LOGE("Request %d: Buffer %ld: Status not OK!",
2233                     frameNumber, (long)idx);
2234            return BAD_VALUE;
2235        }
2236        if (b->release_fence != -1) {
2237            LOGE("Request %d: Buffer %ld: Has a release fence!",
2238                     frameNumber, (long)idx);
2239            return BAD_VALUE;
2240        }
2241        if (b->buffer == NULL) {
2242            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2243                     frameNumber, (long)idx);
2244            return BAD_VALUE;
2245        }
2246    }
2247
2248    // Validate all buffers
2249    b = request->output_buffers;
2250    do {
2251        QCamera3ProcessingChannel *channel =
2252                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2253        if (channel == NULL) {
2254            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2255                     frameNumber, (long)idx);
2256            return BAD_VALUE;
2257        }
2258        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2259            LOGE("Request %d: Buffer %ld: Status not OK!",
2260                     frameNumber, (long)idx);
2261            return BAD_VALUE;
2262        }
2263        if (b->release_fence != -1) {
2264            LOGE("Request %d: Buffer %ld: Has a release fence!",
2265                     frameNumber, (long)idx);
2266            return BAD_VALUE;
2267        }
2268        if (b->buffer == NULL) {
2269            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2270                     frameNumber, (long)idx);
2271            return BAD_VALUE;
2272        }
2273        if (*(b->buffer) == NULL) {
2274            LOGE("Request %d: Buffer %ld: NULL private handle!",
2275                     frameNumber, (long)idx);
2276            return BAD_VALUE;
2277        }
2278        idx++;
2279        b = request->output_buffers + idx;
2280    } while (idx < (ssize_t)request->num_output_buffers);
2281
2282    return NO_ERROR;
2283}
2284
2285/*===========================================================================
2286 * FUNCTION   : deriveMinFrameDuration
2287 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2289 *              on currently configured streams.
2290 *
2291 * PARAMETERS : NONE
2292 *
2293 * RETURN     : NONE
2294 *
2295 *==========================================================================*/
2296void QCamera3HardwareInterface::deriveMinFrameDuration()
2297{
2298    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2299
2300    maxJpegDim = 0;
2301    maxProcessedDim = 0;
2302    maxRawDim = 0;
2303
2304    // Figure out maximum jpeg, processed, and raw dimensions
2305    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2306        it != mStreamInfo.end(); it++) {
2307
2308        // Input stream doesn't have valid stream_type
2309        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2310            continue;
2311
2312        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2313        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2314            if (dimension > maxJpegDim)
2315                maxJpegDim = dimension;
2316        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2317                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2318                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2319            if (dimension > maxRawDim)
2320                maxRawDim = dimension;
2321        } else {
2322            if (dimension > maxProcessedDim)
2323                maxProcessedDim = dimension;
2324        }
2325    }
2326
2327    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2328            MAX_SIZES_CNT);
2329
2330    //Assume all jpeg dimensions are in processed dimensions.
2331    if (maxJpegDim > maxProcessedDim)
2332        maxProcessedDim = maxJpegDim;
2333    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2334    if (maxProcessedDim > maxRawDim) {
2335        maxRawDim = INT32_MAX;
2336
2337        for (size_t i = 0; i < count; i++) {
2338            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2339                    gCamCapability[mCameraId]->raw_dim[i].height;
2340            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2341                maxRawDim = dimension;
2342        }
2343    }
2344
2345    //Find minimum durations for processed, jpeg, and raw
2346    for (size_t i = 0; i < count; i++) {
2347        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2348                gCamCapability[mCameraId]->raw_dim[i].height) {
2349            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2350            break;
2351        }
2352    }
2353    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2354    for (size_t i = 0; i < count; i++) {
2355        if (maxProcessedDim ==
2356                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2357                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2358            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2359            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2360            break;
2361        }
2362    }
2363}
2364
2365/*===========================================================================
2366 * FUNCTION   : getMinFrameDuration
2367 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2374 *
2375 *==========================================================================*/
2376int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2377{
2378    bool hasJpegStream = false;
2379    bool hasRawStream = false;
2380    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2381        const camera3_stream_t *stream = request->output_buffers[i].stream;
2382        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2383            hasJpegStream = true;
2384        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2385                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2386                stream->format == HAL_PIXEL_FORMAT_RAW16)
2387            hasRawStream = true;
2388    }
2389
2390    if (!hasJpegStream)
2391        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2392    else
2393        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2394}
2395
2396/*===========================================================================
2397 * FUNCTION   : handleBuffersDuringFlushLock
2398 *
2399 * DESCRIPTION: Account for buffers returned from back-end during flush
2400 *              This function is executed while mMutex is held by the caller.
2401 *
2402 * PARAMETERS :
2403 *   @buffer: image buffer for the callback
2404 *
2405 * RETURN     :
2406 *==========================================================================*/
2407void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2408{
2409    bool buffer_found = false;
2410    for (List<PendingBuffersInRequest>::iterator req =
2411            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2412            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2413        for (List<PendingBufferInfo>::iterator i =
2414                req->mPendingBufferList.begin();
2415                i != req->mPendingBufferList.end(); i++) {
2416            if (i->buffer == buffer->buffer) {
2417                mPendingBuffersMap.numPendingBufsAtFlush--;
2418                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2419                    buffer->buffer, req->frame_number,
2420                    mPendingBuffersMap.numPendingBufsAtFlush);
2421                buffer_found = true;
2422                break;
2423            }
2424        }
2425        if (buffer_found) {
2426            break;
2427        }
2428    }
2429    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2430        //signal the flush()
2431        LOGD("All buffers returned to HAL. Continue flush");
2432        pthread_cond_signal(&mBuffersCond);
2433    }
2434}
2435
2436
2437/*===========================================================================
2438 * FUNCTION   : handlePendingReprocResults
2439 *
2440 * DESCRIPTION: check and notify on any pending reprocess results
2441 *
2442 * PARAMETERS :
2443 *   @frame_number   : Pending request frame number
2444 *
2445 * RETURN     : int32_t type of status
2446 *              NO_ERROR  -- success
2447 *              none-zero failure code
2448 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result matching this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until now.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its settings and input
            // buffer can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build and send the capture result with the single
                    // reprocessed output buffer.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // The request is fully answered; remove it from the
                    // pending list.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Remove the consumed reprocess entry and stop after the first
            // match for this frame number.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2487
2488/*===========================================================================
2489 * FUNCTION   : handleBatchMetadata
2490 *
2491 * DESCRIPTION: Handles metadata buffer callback in batch mode
2492 *
2493 * PARAMETERS : @metadata_buf: metadata buffer
2494 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2495 *                 the meta buf in this method
2496 *
2497 * RETURN     :
2498 *
2499 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * requests 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Raw pointers into the metadata buffer; NULL if an entry is absent.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to its first one.
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // This batch is now fully accounted for; drop its map entry.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        // A diff larger than MAX_HFR_BATCH_SIZE is unexpected; log it.
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    // Replay the single batch metadata once per frame in the batch, patching
    // the frame number and timestamp entries in place for each iteration.
    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer urgent frame number. Batch metadata contains the frame
             * number of the last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains the frame number
             * of the last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer timestamp: assume frames are evenly spaced at the
                // HFR video frame rate, counting back from the last frame.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // Buffer ownership stays here: pass false so the callee neither
        // bufDones nor frees the metadata buffer on any iteration.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer once after all iterations, if requested */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2648
2649/*===========================================================================
2650 * FUNCTION   : handleMetadataWithLock
2651 *
2652 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2653 *
2654 * PARAMETERS : @metadata_buf: metadata buffer
2655 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2656 *                 the meta buf in this method
2657 *
2658 * RETURN     :
2659 *
2660 *==========================================================================*/
2661void QCamera3HardwareInterface::handleMetadataWithLock(
2662    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2663{
2664    ATRACE_CALL();
2665    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2666        //during flush do not send metadata from this thread
2667        LOGD("not sending metadata during flush or when mState is error");
2668        if (free_and_bufdone_meta_buf) {
2669            mMetadataChannel->bufDone(metadata_buf);
2670            free(metadata_buf);
2671        }
2672        return;
2673    }
2674
2675    //not in flush
2676    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2677    int32_t frame_number_valid, urgent_frame_number_valid;
2678    uint32_t frame_number, urgent_frame_number;
2679    int64_t capture_time;
2680    nsecs_t currentSysTime;
2681
2682    int32_t *p_frame_number_valid =
2683            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2684    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2685    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2686    int32_t *p_urgent_frame_number_valid =
2687            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2688    uint32_t *p_urgent_frame_number =
2689            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2690    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2691            metadata) {
2692        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2693                 *p_frame_number_valid, *p_frame_number);
2694    }
2695
2696    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2697            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2698        LOGE("Invalid metadata");
2699        if (free_and_bufdone_meta_buf) {
2700            mMetadataChannel->bufDone(metadata_buf);
2701            free(metadata_buf);
2702        }
2703        goto done_metadata;
2704    }
2705    frame_number_valid =        *p_frame_number_valid;
2706    frame_number =              *p_frame_number;
2707    capture_time =              *p_capture_time;
2708    urgent_frame_number_valid = *p_urgent_frame_number_valid;
2709    urgent_frame_number =       *p_urgent_frame_number;
2710    currentSysTime =            systemTime(CLOCK_MONOTONIC);
2711
2712    // Detect if buffers from any requests are overdue
2713    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2714        if ( (currentSysTime - req.timestamp) >
2715            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2716            for (auto &missed : req.mPendingBufferList) {
2717                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
2718                    "stream type = %d, stream format = %d",
2719                    frame_number, req.frame_number, missed.buffer,
2720                    missed.stream->stream_type, missed.stream->format);
2721            }
2722        }
2723    }
2724    //Partial result on process_capture_result for timestamp
2725    if (urgent_frame_number_valid) {
2726        LOGD("valid urgent frame_number = %u, capture_time = %lld",
2727           urgent_frame_number, capture_time);
2728
2729        //Recieved an urgent Frame Number, handle it
2730        //using partial results
2731        for (pendingRequestIterator i =
2732                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2733            LOGD("Iterator Frame = %d urgent frame = %d",
2734                 i->frame_number, urgent_frame_number);
2735
2736            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2737                (i->partial_result_cnt == 0)) {
2738                LOGE("Error: HAL missed urgent metadata for frame number %d",
2739                         i->frame_number);
2740            }
2741
2742            if (i->frame_number == urgent_frame_number &&
2743                     i->bUrgentReceived == 0) {
2744
2745                camera3_capture_result_t result;
2746                memset(&result, 0, sizeof(camera3_capture_result_t));
2747
2748                i->partial_result_cnt++;
2749                i->bUrgentReceived = 1;
2750                // Extract 3A metadata
2751                result.result =
2752                    translateCbUrgentMetadataToResultMetadata(metadata);
2753                // Populate metadata result
2754                result.frame_number = urgent_frame_number;
2755                result.num_output_buffers = 0;
2756                result.output_buffers = NULL;
2757                result.partial_result = i->partial_result_cnt;
2758
2759                mCallbackOps->process_capture_result(mCallbackOps, &result);
2760                LOGD("urgent frame_number = %u, capture_time = %lld",
2761                      result.frame_number, capture_time);
2762                free_camera_metadata((camera_metadata_t *)result.result);
2763                break;
2764            }
2765        }
2766    }
2767
2768    if (!frame_number_valid) {
2769        LOGD("Not a valid normal frame number, used as SOF only");
2770        if (free_and_bufdone_meta_buf) {
2771            mMetadataChannel->bufDone(metadata_buf);
2772            free(metadata_buf);
2773        }
2774        goto done_metadata;
2775    }
2776    LOGH("valid frame_number = %u, capture_time = %lld",
2777            frame_number, capture_time);
2778
2779    for (pendingRequestIterator i = mPendingRequestsList.begin();
2780            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2781        // Flush out all entries with less or equal frame numbers.
2782
2783        camera3_capture_result_t result;
2784        memset(&result, 0, sizeof(camera3_capture_result_t));
2785
2786        LOGD("frame_number in the list is %u", i->frame_number);
2787        i->partial_result_cnt++;
2788        result.partial_result = i->partial_result_cnt;
2789
2790        // Check whether any stream buffer corresponding to this is dropped or not
2791        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2792        // The API does not expect a blob buffer to be dropped
2793        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2794            /* Clear notify_msg structure */
2795            camera3_notify_msg_t notify_msg;
2796            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2797            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2798                    j != i->buffers.end(); j++) {
2799                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2800                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2801                for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2802                    if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2803                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2804                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2805                                __func__, i->frame_number, streamID, j->stream->format);
2806                        notify_msg.type = CAMERA3_MSG_ERROR;
2807                        notify_msg.message.error.frame_number = i->frame_number;
2808                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2809                        notify_msg.message.error.error_stream = j->stream;
2810                        mCallbackOps->notify(mCallbackOps, &notify_msg);
2811                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2812                                __func__, i->frame_number, streamID, j->stream->format);
2813                        PendingFrameDropInfo PendingFrameDrop;
2814                        PendingFrameDrop.frame_number=i->frame_number;
2815                        PendingFrameDrop.stream_ID = streamID;
2816                        // Add the Frame drop info to mPendingFrameDropList
2817                        mPendingFrameDropList.push_back(PendingFrameDrop);
2818                   }
2819               }
2820            }
2821        }
2822
2823        // Send empty metadata with already filled buffers for dropped metadata
2824        // and send valid metadata with already filled buffers for current metadata
2825        /* we could hit this case when we either
2826         * 1. have a pending reprocess request or
2827         * 2. miss a metadata buffer callback */
2828        if (i->frame_number < frame_number) {
2829            if (i->input_buffer) {
2830                /* this will be handled in handleInputBufferWithLock */
2831                i++;
2832                continue;
2833            } else {
2834                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
2835                if (free_and_bufdone_meta_buf) {
2836                    mMetadataChannel->bufDone(metadata_buf);
2837                    free(metadata_buf);
2838                }
2839                mState = ERROR;
2840                goto done_metadata;
2841            }
2842        } else {
2843            mPendingLiveRequest--;
2844            /* Clear notify_msg structure */
2845            camera3_notify_msg_t notify_msg;
2846            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2847
2848            // Send shutter notify to frameworks
2849            notify_msg.type = CAMERA3_MSG_SHUTTER;
2850            notify_msg.message.shutter.frame_number = i->frame_number;
2851            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2852            mCallbackOps->notify(mCallbackOps, &notify_msg);
2853
2854            i->timestamp = capture_time;
2855
2856            // Find channel requiring metadata, meaning internal offline postprocess
2857            // is needed.
2858            //TODO: for now, we don't support two streams requiring metadata at the same time.
2859            // (because we are not making copies, and metadata buffer is not reference counted.
2860            bool internalPproc = false;
2861            for (pendingBufferIterator iter = i->buffers.begin();
2862                    iter != i->buffers.end(); iter++) {
2863                if (iter->need_metadata) {
2864                    internalPproc = true;
2865                    QCamera3ProcessingChannel *channel =
2866                            (QCamera3ProcessingChannel *)iter->stream->priv;
2867                    channel->queueReprocMetadata(metadata_buf);
2868                    break;
2869                }
2870            }
2871
2872            result.result = translateFromHalMetadata(metadata,
2873                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2874                    i->capture_intent, i->hybrid_ae_enable, internalPproc, i->fwkCacMode);
2875
2876            saveExifParams(metadata);
2877
2878            if (i->blob_request) {
2879                {
2880                    //Dump tuning metadata if enabled and available
2881                    char prop[PROPERTY_VALUE_MAX];
2882                    memset(prop, 0, sizeof(prop));
2883                    property_get("persist.camera.dumpmetadata", prop, "0");
2884                    int32_t enabled = atoi(prop);
2885                    if (enabled && metadata->is_tuning_params_valid) {
2886                        dumpMetadataToFile(metadata->tuning_params,
2887                               mMetaFrameCount,
2888                               enabled,
2889                               "Snapshot",
2890                               frame_number);
2891                    }
2892                }
2893            }
2894
2895            if (!internalPproc) {
2896                LOGD("couldn't find need_metadata for this metadata");
2897                // Return metadata buffer
2898                if (free_and_bufdone_meta_buf) {
2899                    mMetadataChannel->bufDone(metadata_buf);
2900                    free(metadata_buf);
2901                }
2902            }
2903        }
2904        if (!result.result) {
2905            LOGE("metadata is NULL");
2906        }
2907        result.frame_number = i->frame_number;
2908        result.input_buffer = i->input_buffer;
2909        result.num_output_buffers = 0;
2910        result.output_buffers = NULL;
2911        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2912                    j != i->buffers.end(); j++) {
2913            if (j->buffer) {
2914                result.num_output_buffers++;
2915            }
2916        }
2917
2918        updateFpsInPreviewBuffer(metadata, i->frame_number);
2919
2920        if (result.num_output_buffers > 0) {
2921            camera3_stream_buffer_t *result_buffers =
2922                new camera3_stream_buffer_t[result.num_output_buffers];
2923            if (result_buffers != NULL) {
2924                size_t result_buffers_idx = 0;
2925                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2926                        j != i->buffers.end(); j++) {
2927                    if (j->buffer) {
2928                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2929                                m != mPendingFrameDropList.end(); m++) {
2930                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2931                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2932                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2933                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2934                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
2935                                        frame_number, streamID);
2936                                m = mPendingFrameDropList.erase(m);
2937                                break;
2938                            }
2939                        }
2940                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
2941                        result_buffers[result_buffers_idx++] = *(j->buffer);
2942                        free(j->buffer);
2943                        j->buffer = NULL;
2944                    }
2945                }
2946                result.output_buffers = result_buffers;
2947                mCallbackOps->process_capture_result(mCallbackOps, &result);
2948                LOGD("meta frame_number = %u, capture_time = %lld",
2949                        result.frame_number, i->timestamp);
2950                free_camera_metadata((camera_metadata_t *)result.result);
2951                delete[] result_buffers;
2952            }else {
2953                LOGE("Fatal error: out of memory");
2954            }
2955        } else {
2956            mCallbackOps->process_capture_result(mCallbackOps, &result);
2957            LOGD("meta frame_number = %u, capture_time = %lld",
2958                    result.frame_number, i->timestamp);
2959            free_camera_metadata((camera_metadata_t *)result.result);
2960        }
2961
2962        i = erasePendingRequest(i);
2963
2964        if (!mPendingReprocessResultList.empty()) {
2965            handlePendingReprocResults(frame_number + 1);
2966        }
2967    }
2968
2969done_metadata:
2970    for (pendingRequestIterator i = mPendingRequestsList.begin();
2971            i != mPendingRequestsList.end() ;i++) {
2972        i->pipeline_depth++;
2973    }
2974    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
2975    unblockRequestIfNecessary();
2976}
2977
2978/*===========================================================================
2979 * FUNCTION   : hdrPlusPerfLock
2980 *
2981 * DESCRIPTION: perf lock for HDR+ using custom intent
2982 *
2983 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2984 *
2985 * RETURN     : None
2986 *
2987 *==========================================================================*/
2988void QCamera3HardwareInterface::hdrPlusPerfLock(
2989        mm_camera_super_buf_t *metadata_buf)
2990{
2991    if (NULL == metadata_buf) {
2992        LOGE("metadata_buf is NULL");
2993        return;
2994    }
2995    metadata_buffer_t *metadata =
2996            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2997    int32_t *p_frame_number_valid =
2998            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2999    uint32_t *p_frame_number =
3000            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3001
3002    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3003        LOGE("%s: Invalid metadata", __func__);
3004        return;
3005    }
3006
3007    //acquire perf lock for 5 sec after the last HDR frame is captured
3008    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3009        if ((p_frame_number != NULL) &&
3010                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3011            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3012        }
3013    }
3014
3015    //release lock after perf lock timer is expired. If lock is already released,
3016    //isTimerReset returns false
3017    if (m_perfLock.isTimerReset()) {
3018        mLastCustIntentFrmNum = -1;
3019        m_perfLock.lock_rel_timed();
3020    }
3021}
3022
3023/*===========================================================================
3024 * FUNCTION   : handleInputBufferWithLock
3025 *
3026 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3027 *
3028 * PARAMETERS : @frame_number: frame number of the input buffer
3029 *
3030 * RETURN     :
3031 *
3032 *==========================================================================*/
3033void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3034{
3035    ATRACE_CALL();
3036    pendingRequestIterator i = mPendingRequestsList.begin();
3037    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3038        i++;
3039    }
3040    if (i != mPendingRequestsList.end() && i->input_buffer) {
3041        //found the right request
3042        if (!i->shutter_notified) {
3043            CameraMetadata settings;
3044            camera3_notify_msg_t notify_msg;
3045            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3046            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3047            if(i->settings) {
3048                settings = i->settings;
3049                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3050                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3051                } else {
3052                    LOGE("No timestamp in input settings! Using current one.");
3053                }
3054            } else {
3055                LOGE("Input settings missing!");
3056            }
3057
3058            notify_msg.type = CAMERA3_MSG_SHUTTER;
3059            notify_msg.message.shutter.frame_number = frame_number;
3060            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3061            mCallbackOps->notify(mCallbackOps, &notify_msg);
3062            i->shutter_notified = true;
3063            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3064                        i->frame_number, notify_msg.message.shutter.timestamp);
3065        }
3066
3067        if (i->input_buffer->release_fence != -1) {
3068           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3069           close(i->input_buffer->release_fence);
3070           if (rc != OK) {
3071               LOGE("input buffer sync wait failed %d", rc);
3072           }
3073        }
3074
3075        camera3_capture_result result;
3076        memset(&result, 0, sizeof(camera3_capture_result));
3077        result.frame_number = frame_number;
3078        result.result = i->settings;
3079        result.input_buffer = i->input_buffer;
3080        result.partial_result = PARTIAL_RESULT_COUNT;
3081
3082        mCallbackOps->process_capture_result(mCallbackOps, &result);
3083        LOGD("Input request metadata and input buffer frame_number = %u",
3084                        i->frame_number);
3085        i = erasePendingRequest(i);
3086    } else {
3087        LOGE("Could not find input request for frame number %d", frame_number);
3088    }
3089}
3090
3091/*===========================================================================
3092 * FUNCTION   : handleBufferWithLock
3093 *
3094 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3095 *
3096 * PARAMETERS : @buffer: image buffer for the callback
3097 *              @frame_number: frame number of the image buffer
3098 *
3099 * RETURN     :
3100 *
3101 *==========================================================================*/
3102void QCamera3HardwareInterface::handleBufferWithLock(
3103    camera3_stream_buffer_t *buffer, uint32_t frame_number)
3104{
3105    ATRACE_CALL();
3106    /* Nothing to be done during error state */
3107    if ((ERROR == mState) || (DEINIT == mState)) {
3108        return;
3109    }
3110    if (mFlushPerf) {
3111        handleBuffersDuringFlushLock(buffer);
3112        return;
3113    }
3114    //not in flush
3115    // If the frame number doesn't exist in the pending request list,
3116    // directly send the buffer to the frameworks, and update pending buffers map
3117    // Otherwise, book-keep the buffer.
3118    pendingRequestIterator i = mPendingRequestsList.begin();
3119    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3120        i++;
3121    }
3122    if (i == mPendingRequestsList.end()) {
3123        // Verify all pending requests frame_numbers are greater
3124        for (pendingRequestIterator j = mPendingRequestsList.begin();
3125                j != mPendingRequestsList.end(); j++) {
3126            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3127                LOGW("Error: pending live frame number %d is smaller than %d",
3128                         j->frame_number, frame_number);
3129            }
3130        }
3131        camera3_capture_result_t result;
3132        memset(&result, 0, sizeof(camera3_capture_result_t));
3133        result.result = NULL;
3134        result.frame_number = frame_number;
3135        result.num_output_buffers = 1;
3136        result.partial_result = 0;
3137        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3138                m != mPendingFrameDropList.end(); m++) {
3139            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3140            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3141            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3142                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3143                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3144                         frame_number, streamID);
3145                m = mPendingFrameDropList.erase(m);
3146                break;
3147            }
3148        }
3149        result.output_buffers = buffer;
3150        LOGH("result frame_number = %d, buffer = %p",
3151                 frame_number, buffer->buffer);
3152
3153        mPendingBuffersMap.removeBuf(buffer->buffer);
3154
3155        mCallbackOps->process_capture_result(mCallbackOps, &result);
3156    } else {
3157        if (i->input_buffer) {
3158            CameraMetadata settings;
3159            camera3_notify_msg_t notify_msg;
3160            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3161            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3162            if(i->settings) {
3163                settings = i->settings;
3164                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3165                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3166                } else {
3167                    LOGW("No timestamp in input settings! Using current one.");
3168                }
3169            } else {
3170                LOGE("Input settings missing!");
3171            }
3172
3173            notify_msg.type = CAMERA3_MSG_SHUTTER;
3174            notify_msg.message.shutter.frame_number = frame_number;
3175            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3176
3177            if (i->input_buffer->release_fence != -1) {
3178               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3179               close(i->input_buffer->release_fence);
3180               if (rc != OK) {
3181                   LOGE("input buffer sync wait failed %d", rc);
3182               }
3183            }
3184            mPendingBuffersMap.removeBuf(buffer->buffer);
3185
3186            bool notifyNow = true;
3187            for (pendingRequestIterator j = mPendingRequestsList.begin();
3188                    j != mPendingRequestsList.end(); j++) {
3189                if (j->frame_number < frame_number) {
3190                    notifyNow = false;
3191                    break;
3192                }
3193            }
3194
3195            if (notifyNow) {
3196                camera3_capture_result result;
3197                memset(&result, 0, sizeof(camera3_capture_result));
3198                result.frame_number = frame_number;
3199                result.result = i->settings;
3200                result.input_buffer = i->input_buffer;
3201                result.num_output_buffers = 1;
3202                result.output_buffers = buffer;
3203                result.partial_result = PARTIAL_RESULT_COUNT;
3204
3205                mCallbackOps->notify(mCallbackOps, &notify_msg);
3206                mCallbackOps->process_capture_result(mCallbackOps, &result);
3207                LOGD("Notify reprocess now %d!", frame_number);
3208                i = erasePendingRequest(i);
3209            } else {
3210                // Cache reprocess result for later
3211                PendingReprocessResult pendingResult;
3212                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3213                pendingResult.notify_msg = notify_msg;
3214                pendingResult.buffer = *buffer;
3215                pendingResult.frame_number = frame_number;
3216                mPendingReprocessResultList.push_back(pendingResult);
3217                LOGD("Cache reprocess result %d!", frame_number);
3218            }
3219        } else {
3220            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3221                j != i->buffers.end(); j++) {
3222                if (j->stream == buffer->stream) {
3223                    if (j->buffer != NULL) {
3224                        LOGE("Error: buffer is already set");
3225                    } else {
3226                        j->buffer = (camera3_stream_buffer_t *)malloc(
3227                            sizeof(camera3_stream_buffer_t));
3228                        *(j->buffer) = *buffer;
3229                        LOGH("cache buffer %p at result frame_number %u",
3230                             buffer->buffer, frame_number);
3231                    }
3232                }
3233            }
3234        }
3235    }
3236}
3237
3238/*===========================================================================
3239 * FUNCTION   : unblockRequestIfNecessary
3240 *
3241 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3242 *              that mMutex is held when this function is called.
3243 *
3244 * PARAMETERS :
3245 *
3246 * RETURN     :
3247 *
3248 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Wake a thread blocked in processCaptureRequest waiting on mRequestCond.
   // Per the function header, mMutex is held by the caller when this runs.
   pthread_cond_signal(&mRequestCond);
}
3254
3255
3256/*===========================================================================
3257 * FUNCTION   : processCaptureRequest
3258 *
3259 * DESCRIPTION: process a capture request from camera service
3260 *
3261 * PARAMETERS :
3262 *   @request : request from framework to process
3263 *
3264 * RETURN     :
3265 *
3266 *==========================================================================*/
3267int QCamera3HardwareInterface::processCaptureRequest(
3268                    camera3_capture_request_t *request)
3269{
3270    ATRACE_CALL();
3271    int rc = NO_ERROR;
3272    int32_t request_id;
3273    CameraMetadata meta;
3274    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3275    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3276    bool isVidBufRequested = false;
3277    camera3_stream_buffer_t *pInputBuffer = NULL;
3278
3279    pthread_mutex_lock(&mMutex);
3280
3281    // Validate current state
3282    switch (mState) {
3283        case CONFIGURED:
3284        case STARTED:
3285            /* valid state */
3286            break;
3287
3288        case ERROR:
3289            pthread_mutex_unlock(&mMutex);
3290            handleCameraDeviceError();
3291            return -ENODEV;
3292
3293        default:
3294            LOGE("Invalid state %d", mState);
3295            pthread_mutex_unlock(&mMutex);
3296            return -ENODEV;
3297    }
3298
3299    rc = validateCaptureRequest(request);
3300    if (rc != NO_ERROR) {
3301        LOGE("incoming request is not valid");
3302        pthread_mutex_unlock(&mMutex);
3303        return rc;
3304    }
3305
3306    meta = request->settings;
3307
3308    // For first capture request, send capture intent, and
3309    // stream on all streams
3310    if (mState == CONFIGURED) {
3311        // send an unconfigure to the backend so that the isp
3312        // resources are deallocated
3313        if (!mFirstConfiguration) {
3314            cam_stream_size_info_t stream_config_info;
3315            int32_t hal_version = CAM_HAL_V3;
3316            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3317            stream_config_info.buffer_info.min_buffers =
3318                    MIN_INFLIGHT_REQUESTS;
3319            stream_config_info.buffer_info.max_buffers =
3320                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3321            clear_metadata_buffer(mParameters);
3322            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3323                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3324            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3325                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3326            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3327                    mParameters);
3328            if (rc < 0) {
3329                LOGE("set_parms for unconfigure failed");
3330                pthread_mutex_unlock(&mMutex);
3331                return rc;
3332            }
3333        }
3334        m_perfLock.lock_acq();
3335        /* get eis information for stream configuration */
3336        cam_is_type_t is_type;
3337        char is_type_value[PROPERTY_VALUE_MAX];
3338        property_get("persist.camera.is_type", is_type_value, "0");
3339        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3340
3341        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3342            int32_t hal_version = CAM_HAL_V3;
3343            uint8_t captureIntent =
3344                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3345            mCaptureIntent = captureIntent;
3346            clear_metadata_buffer(mParameters);
3347            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3348            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3349        }
3350
3351        //If EIS is enabled, turn it on for video
3352        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3353        int32_t vsMode;
3354        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3355        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3356            rc = BAD_VALUE;
3357        }
3358
3359        //IS type will be 0 unless EIS is supported. If EIS is supported
3360        //it could either be 1 or 4 depending on the stream and video size
3361        if (setEis) {
3362            if (!m_bEisSupportedSize) {
3363                is_type = IS_TYPE_DIS;
3364            } else {
3365                is_type = IS_TYPE_EIS_2_0;
3366            }
3367            mStreamConfigInfo.is_type = is_type;
3368        } else {
3369            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3370        }
3371
3372        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3373                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3374        int32_t tintless_value = 1;
3375        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3376                CAM_INTF_PARM_TINTLESS, tintless_value);
3377        //Disable CDS for HFR mode or if DIS/EIS is on.
3378        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3379        //after every configure_stream
3380        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3381                (m_bIsVideo)) {
3382            int32_t cds = CAM_CDS_MODE_OFF;
3383            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3384                    CAM_INTF_PARM_CDS_MODE, cds))
3385                LOGE("Failed to disable CDS for HFR mode");
3386
3387        }
3388        setMobicat();
3389
3390        /* Set fps and hfr mode while sending meta stream info so that sensor
3391         * can configure appropriate streaming mode */
3392        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3393        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3394            rc = setHalFpsRange(meta, mParameters);
3395            if (rc != NO_ERROR) {
3396                LOGE("setHalFpsRange failed");
3397            }
3398        }
3399        if (meta.exists(ANDROID_CONTROL_MODE)) {
3400            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3401            rc = extractSceneMode(meta, metaMode, mParameters);
3402            if (rc != NO_ERROR) {
3403                LOGE("extractSceneMode failed");
3404            }
3405        }
3406
3407        //TODO: validate the arguments, HSV scenemode should have only the
3408        //advertised fps ranges
3409
        /*set the capture intent, hal version, tintless, stream info,
         *and DIS enable parameters to the backend*/
3412        LOGD("set_parms META_STREAM_INFO " );
3413        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3414            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3415                    "Format:%d",
3416                    mStreamConfigInfo.type[i],
3417                    mStreamConfigInfo.stream_sizes[i].width,
3418                    mStreamConfigInfo.stream_sizes[i].height,
3419                    mStreamConfigInfo.postprocess_mask[i],
3420                    mStreamConfigInfo.format[i]);
3421        }
3422        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3423                    mParameters);
3424        if (rc < 0) {
3425            LOGE("set_parms failed for hal version, stream info");
3426        }
3427
3428        cam_dimension_t sensor_dim;
3429        memset(&sensor_dim, 0, sizeof(sensor_dim));
3430        rc = getSensorOutputSize(sensor_dim);
3431        if (rc != NO_ERROR) {
3432            LOGE("Failed to get sensor output size");
3433            pthread_mutex_unlock(&mMutex);
3434            goto error_exit;
3435        }
3436
3437        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3438                gCamCapability[mCameraId]->active_array_size.height,
3439                sensor_dim.width, sensor_dim.height);
3440
3441        /* Set batchmode before initializing channel. Since registerBuffer
3442         * internally initializes some of the channels, better set batchmode
3443         * even before first register buffer */
3444        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3445            it != mStreamInfo.end(); it++) {
3446            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3447            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3448                    && mBatchSize) {
3449                rc = channel->setBatchSize(mBatchSize);
3450                //Disable per frame map unmap for HFR/batchmode case
3451                rc |= channel->setPerFrameMapUnmap(false);
3452                if (NO_ERROR != rc) {
3453                    LOGE("Channel init failed %d", rc);
3454                    pthread_mutex_unlock(&mMutex);
3455                    goto error_exit;
3456                }
3457            }
3458        }
3459
3460        //First initialize all streams
3461        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3462            it != mStreamInfo.end(); it++) {
3463            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3464            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3465               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3466               setEis)
3467                rc = channel->initialize(is_type);
3468            else {
3469                rc = channel->initialize(IS_TYPE_NONE);
3470            }
3471            if (NO_ERROR != rc) {
3472                LOGE("Channel initialization failed %d", rc);
3473                pthread_mutex_unlock(&mMutex);
3474                goto error_exit;
3475            }
3476        }
3477
3478        if (mRawDumpChannel) {
3479            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3480            if (rc != NO_ERROR) {
3481                LOGE("Error: Raw Dump Channel init failed");
3482                pthread_mutex_unlock(&mMutex);
3483                goto error_exit;
3484            }
3485        }
3486        if (mSupportChannel) {
3487            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3488            if (rc < 0) {
3489                LOGE("Support channel initialization failed");
3490                pthread_mutex_unlock(&mMutex);
3491                goto error_exit;
3492            }
3493        }
3494        if (mAnalysisChannel) {
3495            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3496            if (rc < 0) {
3497                LOGE("Analysis channel initialization failed");
3498                pthread_mutex_unlock(&mMutex);
3499                goto error_exit;
3500            }
3501        }
3502        if (mDummyBatchChannel) {
3503            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3504            if (rc < 0) {
3505                LOGE("mDummyBatchChannel setBatchSize failed");
3506                pthread_mutex_unlock(&mMutex);
3507                goto error_exit;
3508            }
3509            rc = mDummyBatchChannel->initialize(is_type);
3510            if (rc < 0) {
3511                LOGE("mDummyBatchChannel initialization failed");
3512                pthread_mutex_unlock(&mMutex);
3513                goto error_exit;
3514            }
3515        }
3516
3517        // Set bundle info
3518        rc = setBundleInfo();
3519        if (rc < 0) {
3520            LOGE("setBundleInfo failed %d", rc);
3521            pthread_mutex_unlock(&mMutex);
3522            goto error_exit;
3523        }
3524
3525        //Then start them.
3526        LOGH("Start META Channel");
3527        rc = mMetadataChannel->start();
3528        if (rc < 0) {
3529            LOGE("META channel start failed");
3530            pthread_mutex_unlock(&mMutex);
3531            goto error_exit;
3532        }
3533
3534        if (mAnalysisChannel) {
3535            rc = mAnalysisChannel->start();
3536            if (rc < 0) {
3537                LOGE("Analysis channel start failed");
3538                mMetadataChannel->stop();
3539                pthread_mutex_unlock(&mMutex);
3540                goto error_exit;
3541            }
3542        }
3543
3544        if (mSupportChannel) {
3545            rc = mSupportChannel->start();
3546            if (rc < 0) {
3547                LOGE("Support channel start failed");
3548                mMetadataChannel->stop();
3549                /* Although support and analysis are mutually exclusive today
3550                   adding it in anycase for future proofing */
3551                if (mAnalysisChannel) {
3552                    mAnalysisChannel->stop();
3553                }
3554                pthread_mutex_unlock(&mMutex);
3555                goto error_exit;
3556            }
3557        }
3558        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3559            it != mStreamInfo.end(); it++) {
3560            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3561            LOGH("Start Processing Channel mask=%d",
3562                     channel->getStreamTypeMask());
3563            rc = channel->start();
3564            if (rc < 0) {
3565                LOGE("channel start failed");
3566                pthread_mutex_unlock(&mMutex);
3567                goto error_exit;
3568            }
3569        }
3570
3571        if (mRawDumpChannel) {
3572            LOGD("Starting raw dump stream");
3573            rc = mRawDumpChannel->start();
3574            if (rc != NO_ERROR) {
3575                LOGE("Error Starting Raw Dump Channel");
3576                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3577                      it != mStreamInfo.end(); it++) {
3578                    QCamera3Channel *channel =
3579                        (QCamera3Channel *)(*it)->stream->priv;
3580                    LOGH("Stopping Processing Channel mask=%d",
3581                        channel->getStreamTypeMask());
3582                    channel->stop();
3583                }
3584                if (mSupportChannel)
3585                    mSupportChannel->stop();
3586                if (mAnalysisChannel) {
3587                    mAnalysisChannel->stop();
3588                }
3589                mMetadataChannel->stop();
3590                pthread_mutex_unlock(&mMutex);
3591                goto error_exit;
3592            }
3593        }
3594
3595        if (mChannelHandle) {
3596
3597            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3598                    mChannelHandle);
3599            if (rc != NO_ERROR) {
3600                LOGE("start_channel failed %d", rc);
3601                pthread_mutex_unlock(&mMutex);
3602                goto error_exit;
3603            }
3604        }
3605
3606
3607        goto no_error;
3608error_exit:
3609        m_perfLock.lock_rel();
3610        return rc;
3611no_error:
3612        m_perfLock.lock_rel();
3613
3614        mWokenUpByDaemon = false;
3615        mPendingLiveRequest = 0;
3616        mFirstConfiguration = false;
3617        enablePowerHint();
3618    }
3619
3620    uint32_t frameNumber = request->frame_number;
3621    cam_stream_ID_t streamID;
3622
3623    if (mFlushPerf) {
3624        //we cannot accept any requests during flush
3625        LOGE("process_capture_request cannot proceed during flush");
3626        pthread_mutex_unlock(&mMutex);
3627        return NO_ERROR; //should return an error
3628    }
3629
3630    if (meta.exists(ANDROID_REQUEST_ID)) {
3631        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3632        mCurrentRequestId = request_id;
3633        LOGD("Received request with id: %d", request_id);
3634    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3635        LOGE("Unable to find request id field, \
3636                & no previous id available");
3637        pthread_mutex_unlock(&mMutex);
3638        return NAME_NOT_FOUND;
3639    } else {
3640        LOGD("Re-using old request id");
3641        request_id = mCurrentRequestId;
3642    }
3643
3644    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3645                                    request->num_output_buffers,
3646                                    request->input_buffer,
3647                                    frameNumber);
3648    // Acquire all request buffers first
3649    streamID.num_streams = 0;
3650    int blob_request = 0;
3651    uint32_t snapshotStreamId = 0;
3652    for (size_t i = 0; i < request->num_output_buffers; i++) {
3653        const camera3_stream_buffer_t& output = request->output_buffers[i];
3654        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3655
3656        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3657            //Call function to store local copy of jpeg data for encode params.
3658            blob_request = 1;
3659            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3660        }
3661
3662        if (output.acquire_fence != -1) {
3663           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3664           close(output.acquire_fence);
3665           if (rc != OK) {
3666              LOGE("sync wait failed %d", rc);
3667              pthread_mutex_unlock(&mMutex);
3668              return rc;
3669           }
3670        }
3671
3672        streamID.streamID[streamID.num_streams] =
3673            channel->getStreamID(channel->getStreamTypeMask());
3674        streamID.num_streams++;
3675
3676        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3677            isVidBufRequested = true;
3678        }
3679    }
3680
3681    if (blob_request) {
3682        KPI_ATRACE_INT("SNAPSHOT", 1);
3683    }
3684    if (blob_request && mRawDumpChannel) {
3685        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3686        streamID.streamID[streamID.num_streams] =
3687            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3688        streamID.num_streams++;
3689    }
3690
3691    if(request->input_buffer == NULL) {
3692        /* Parse the settings:
3693         * - For every request in NORMAL MODE
3694         * - For every request in HFR mode during preview only case
3695         * - For first request of every batch in HFR mode during video
3696         * recording. In batchmode the same settings except frame number is
3697         * repeated in each request of the batch.
3698         */
3699        if (!mBatchSize ||
3700           (mBatchSize && !isVidBufRequested) ||
3701           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3702            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3703            if (rc < 0) {
3704                LOGE("fail to set frame parameters");
3705                pthread_mutex_unlock(&mMutex);
3706                return rc;
3707            }
3708        }
3709        /* For batchMode HFR, setFrameParameters is not called for every
3710         * request. But only frame number of the latest request is parsed.
3711         * Keep track of first and last frame numbers in a batch so that
3712         * metadata for the frame numbers of batch can be duplicated in
3713         * handleBatchMetadta */
3714        if (mBatchSize) {
3715            if (!mToBeQueuedVidBufs) {
3716                //start of the batch
3717                mFirstFrameNumberInBatch = request->frame_number;
3718            }
3719            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3720                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3721                LOGE("Failed to set the frame number in the parameters");
3722                return BAD_VALUE;
3723            }
3724        }
3725        if (mNeedSensorRestart) {
3726            /* Unlock the mutex as restartSensor waits on the channels to be
3727             * stopped, which in turn calls stream callback functions -
3728             * handleBufferWithLock and handleMetadataWithLock */
3729            pthread_mutex_unlock(&mMutex);
3730            rc = dynamicUpdateMetaStreamInfo();
3731            if (rc != NO_ERROR) {
3732                LOGE("Restarting the sensor failed");
3733                return BAD_VALUE;
3734            }
3735            mNeedSensorRestart = false;
3736            pthread_mutex_lock(&mMutex);
3737        }
3738    } else {
3739
3740        if (request->input_buffer->acquire_fence != -1) {
3741           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3742           close(request->input_buffer->acquire_fence);
3743           if (rc != OK) {
3744              LOGE("input buffer sync wait failed %d", rc);
3745              pthread_mutex_unlock(&mMutex);
3746              return rc;
3747           }
3748        }
3749    }
3750
3751    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3752        mLastCustIntentFrmNum = frameNumber;
3753    }
3754    /* Update pending request list and pending buffers map */
3755    PendingRequestInfo pendingRequest;
3756    pendingRequestIterator latestRequest;
3757    pendingRequest.frame_number = frameNumber;
3758    pendingRequest.num_buffers = request->num_output_buffers;
3759    pendingRequest.request_id = request_id;
3760    pendingRequest.blob_request = blob_request;
3761    pendingRequest.timestamp = 0;
3762    pendingRequest.bUrgentReceived = 0;
3763    if (request->input_buffer) {
3764        pendingRequest.input_buffer =
3765                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3766        *(pendingRequest.input_buffer) = *(request->input_buffer);
3767        pInputBuffer = pendingRequest.input_buffer;
3768    } else {
3769       pendingRequest.input_buffer = NULL;
3770       pInputBuffer = NULL;
3771    }
3772
3773    pendingRequest.pipeline_depth = 0;
3774    pendingRequest.partial_result_cnt = 0;
3775    extractJpegMetadata(mCurJpegMeta, request);
3776    pendingRequest.jpegMetadata = mCurJpegMeta;
3777    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3778    pendingRequest.shutter_notified = false;
3779
3780    //extract capture intent
3781    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3782        mCaptureIntent =
3783                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3784    }
3785    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3786        mHybridAeEnable =
3787                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3788    }
3789    pendingRequest.capture_intent = mCaptureIntent;
3790    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3791
3792    //extract CAC info
3793    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
3794        mCacMode =
3795                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
3796    }
3797    pendingRequest.fwkCacMode = mCacMode;
3798
3799    PendingBuffersInRequest bufsForCurRequest;
3800    bufsForCurRequest.frame_number = frameNumber;
3801    // Mark current timestamp for the new request
3802    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
3803
3804    for (size_t i = 0; i < request->num_output_buffers; i++) {
3805        RequestedBufferInfo requestedBuf;
3806        memset(&requestedBuf, 0, sizeof(requestedBuf));
3807        requestedBuf.stream = request->output_buffers[i].stream;
3808        requestedBuf.buffer = NULL;
3809        pendingRequest.buffers.push_back(requestedBuf);
3810
3811        // Add to buffer handle the pending buffers list
3812        PendingBufferInfo bufferInfo;
3813        bufferInfo.buffer = request->output_buffers[i].buffer;
3814        bufferInfo.stream = request->output_buffers[i].stream;
3815        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
3816        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3817        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3818            frameNumber, bufferInfo.buffer,
3819            channel->getStreamTypeMask(), bufferInfo.stream->format);
3820    }
3821    // Add this request packet into mPendingBuffersMap
3822    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
3823    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
3824        mPendingBuffersMap.get_num_overall_buffers());
3825
3826    latestRequest = mPendingRequestsList.insert(
3827            mPendingRequestsList.end(), pendingRequest);
3828    if(mFlush) {
3829        pthread_mutex_unlock(&mMutex);
3830        return NO_ERROR;
3831    }
3832
3833    // Notify metadata channel we receive a request
3834    mMetadataChannel->request(NULL, frameNumber);
3835
3836    if(request->input_buffer != NULL){
3837        LOGD("Input request, frame_number %d", frameNumber);
3838        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3839        if (NO_ERROR != rc) {
3840            LOGE("fail to set reproc parameters");
3841            pthread_mutex_unlock(&mMutex);
3842            return rc;
3843        }
3844    }
3845
3846    // Call request on other streams
3847    uint32_t streams_need_metadata = 0;
3848    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3849    for (size_t i = 0; i < request->num_output_buffers; i++) {
3850        const camera3_stream_buffer_t& output = request->output_buffers[i];
3851        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3852
3853        if (channel == NULL) {
3854            LOGW("invalid channel pointer for stream");
3855            continue;
3856        }
3857
3858        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3859            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
3860                      output.buffer, request->input_buffer, frameNumber);
3861            if(request->input_buffer != NULL){
3862                rc = channel->request(output.buffer, frameNumber,
3863                        pInputBuffer, &mReprocMeta);
3864                if (rc < 0) {
3865                    LOGE("Fail to request on picture channel");
3866                    pthread_mutex_unlock(&mMutex);
3867                    return rc;
3868                }
3869            } else {
3870                LOGD("snapshot request with buffer %p, frame_number %d",
3871                         output.buffer, frameNumber);
3872                if (!request->settings) {
3873                    rc = channel->request(output.buffer, frameNumber,
3874                            NULL, mPrevParameters);
3875                } else {
3876                    rc = channel->request(output.buffer, frameNumber,
3877                            NULL, mParameters);
3878                }
3879                if (rc < 0) {
3880                    LOGE("Fail to request on picture channel");
3881                    pthread_mutex_unlock(&mMutex);
3882                    return rc;
3883                }
3884                pendingBufferIter->need_metadata = true;
3885                streams_need_metadata++;
3886            }
3887        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3888            bool needMetadata = false;
3889
3890            if (m_perfLock.isPerfLockTimedAcquired()) {
3891                if (m_perfLock.isTimerReset())
3892                {
3893                    m_perfLock.lock_rel_timed();
3894                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3895                }
3896            } else {
3897                m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3898            }
3899
3900            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3901            rc = yuvChannel->request(output.buffer, frameNumber,
3902                    pInputBuffer,
3903                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3904            if (rc < 0) {
3905                LOGE("Fail to request on YUV channel");
3906                pthread_mutex_unlock(&mMutex);
3907                return rc;
3908            }
3909            pendingBufferIter->need_metadata = needMetadata;
3910            if (needMetadata)
3911                streams_need_metadata += 1;
3912            LOGD("calling YUV channel request, need_metadata is %d",
3913                     needMetadata);
3914        } else {
3915            LOGD("request with buffer %p, frame_number %d",
3916                  output.buffer, frameNumber);
3917            /* Set perf lock for API-2 zsl */
3918            if (IS_USAGE_ZSL(output.stream->usage)) {
3919                if (m_perfLock.isPerfLockTimedAcquired()) {
3920                    if (m_perfLock.isTimerReset())
3921                    {
3922                        m_perfLock.lock_rel_timed();
3923                        m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3924                    }
3925                } else {
3926                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3927                }
3928            }
3929
3930            rc = channel->request(output.buffer, frameNumber);
3931            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3932                    && mBatchSize) {
3933                mToBeQueuedVidBufs++;
3934                if (mToBeQueuedVidBufs == mBatchSize) {
3935                    channel->queueBatchBuf();
3936                }
3937            }
3938            if (rc < 0) {
3939                LOGE("request failed");
3940                pthread_mutex_unlock(&mMutex);
3941                return rc;
3942            }
3943        }
3944        pendingBufferIter++;
3945    }
3946
3947    //If 2 streams have need_metadata set to true, fail the request, unless
3948    //we copy/reference count the metadata buffer
3949    if (streams_need_metadata > 1) {
3950        LOGE("not supporting request in which two streams requires"
3951                " 2 HAL metadata for reprocessing");
3952        pthread_mutex_unlock(&mMutex);
3953        return -EINVAL;
3954    }
3955
3956    if(request->input_buffer == NULL) {
3957        /* Set the parameters to backend:
3958         * - For every request in NORMAL MODE
3959         * - For every request in HFR mode during preview only case
3960         * - Once every batch in HFR mode during video recording
3961         */
3962        if (!mBatchSize ||
3963           (mBatchSize && !isVidBufRequested) ||
3964           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3965            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3966                     mBatchSize, isVidBufRequested,
3967                    mToBeQueuedVidBufs);
3968            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3969                    mParameters);
3970            if (rc < 0) {
3971                LOGE("set_parms failed");
3972            }
3973            /* reset to zero coz, the batch is queued */
3974            mToBeQueuedVidBufs = 0;
3975            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3976        }
3977        mPendingLiveRequest++;
3978    }
3979
3980    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3981
3982    mState = STARTED;
3983    // Added a timed condition wait
3984    struct timespec ts;
3985    uint8_t isValidTimeout = 1;
3986    rc = clock_gettime(CLOCK_REALTIME, &ts);
3987    if (rc < 0) {
3988      isValidTimeout = 0;
3989      LOGE("Error reading the real time clock!!");
3990    }
3991    else {
3992      // Make timeout as 5 sec for request to be honored
3993      ts.tv_sec += 5;
3994    }
3995    //Block on conditional variable
3996    if (mBatchSize) {
3997        /* For HFR, more buffers are dequeued upfront to improve the performance */
3998        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3999        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4000    }
4001    if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
4002        m_perfLock.lock_rel_timed();
4003
4004    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
4005            (mState != ERROR) && (mState != DEINIT)) {
4006        if (!isValidTimeout) {
4007            LOGD("Blocking on conditional wait");
4008            pthread_cond_wait(&mRequestCond, &mMutex);
4009        }
4010        else {
4011            LOGD("Blocking on timed conditional wait");
4012            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4013            if (rc == ETIMEDOUT) {
4014                rc = -ENODEV;
4015                LOGE("Unblocked on timeout!!!!");
4016                break;
4017            }
4018        }
4019        LOGD("Unblocked");
4020        if (mWokenUpByDaemon) {
4021            mWokenUpByDaemon = false;
4022            if (mPendingLiveRequest < maxInFlightRequests)
4023                break;
4024        }
4025    }
4026    pthread_mutex_unlock(&mMutex);
4027
4028    return rc;
4029}
4030
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dumps current HAL3 state — the pending request list, the
 *              pending buffer map, and the pending frame drop list — to the
 *              given file descriptor (triggered via dumpsys media.camera).
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump output to
 *
 * RETURN     : None
 *==========================================================================*/
4041void QCamera3HardwareInterface::dump(int fd)
4042{
4043    pthread_mutex_lock(&mMutex);
4044    dprintf(fd, "\n Camera HAL3 information Begin \n");
4045
4046    dprintf(fd, "\nNumber of pending requests: %zu \n",
4047        mPendingRequestsList.size());
4048    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4049    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4050    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4051    for(pendingRequestIterator i = mPendingRequestsList.begin();
4052            i != mPendingRequestsList.end(); i++) {
4053        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4054        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4055        i->input_buffer);
4056    }
4057    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4058                mPendingBuffersMap.get_num_overall_buffers());
4059    dprintf(fd, "-------+------------------\n");
4060    dprintf(fd, " Frame | Stream type mask \n");
4061    dprintf(fd, "-------+------------------\n");
4062    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4063        for(auto &j : req.mPendingBufferList) {
4064            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4065            dprintf(fd, " %5d | %11d \n",
4066                    req.frame_number, channel->getStreamTypeMask());
4067        }
4068    }
4069    dprintf(fd, "-------+------------------\n");
4070
4071    dprintf(fd, "\nPending frame drop list: %zu\n",
4072        mPendingFrameDropList.size());
4073    dprintf(fd, "-------+-----------\n");
4074    dprintf(fd, " Frame | Stream ID \n");
4075    dprintf(fd, "-------+-----------\n");
4076    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4077        i != mPendingFrameDropList.end(); i++) {
4078        dprintf(fd, " %5d | %9d \n",
4079            i->frame_number, i->stream_ID);
4080    }
4081    dprintf(fd, "-------+-----------\n");
4082
4083    dprintf(fd, "\n Camera HAL3 information End \n");
4084
4085    /* use dumpsys media.camera as trigger to send update debug level event */
4086    mUpdateDebugLevel = true;
4087    pthread_mutex_unlock(&mMutex);
4088    return;
4089}
4090
4091/*===========================================================================
4092 * FUNCTION   : flush
4093 *
4094 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4095 *              conditionally restarts channels
4096 *
4097 * PARAMETERS :
4098 *  @ restartChannels: re-start all channels
4099 *
4100 *
4101 * RETURN     :
4102 *          0 on success
4103 *          Error code on failure
4104 *==========================================================================*/
4105int QCamera3HardwareInterface::flush(bool restartChannels)
4106{
4107    KPI_ATRACE_CALL();
4108    int32_t rc = NO_ERROR;
4109
4110    LOGD("Unblocking Process Capture Request");
4111    pthread_mutex_lock(&mMutex);
4112    mFlush = true;
4113    pthread_mutex_unlock(&mMutex);
4114
4115    rc = stopAllChannels();
4116    if (rc < 0) {
4117        LOGE("stopAllChannels failed");
4118        return rc;
4119    }
4120    if (mChannelHandle) {
4121        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4122                mChannelHandle);
4123    }
4124
4125    // Reset bundle info
4126    rc = setBundleInfo();
4127    if (rc < 0) {
4128        LOGE("setBundleInfo failed %d", rc);
4129        return rc;
4130    }
4131
4132    // Mutex Lock
4133    pthread_mutex_lock(&mMutex);
4134
4135    // Unblock process_capture_request
4136    mPendingLiveRequest = 0;
4137    pthread_cond_signal(&mRequestCond);
4138
4139    rc = notifyErrorForPendingRequests();
4140    if (rc < 0) {
4141        LOGE("notifyErrorForPendingRequests failed");
4142        pthread_mutex_unlock(&mMutex);
4143        return rc;
4144    }
4145
4146    mFlush = false;
4147
4148    // Start the Streams/Channels
4149    if (restartChannels) {
4150        rc = startAllChannels();
4151        if (rc < 0) {
4152            LOGE("startAllChannels failed");
4153            pthread_mutex_unlock(&mMutex);
4154            return rc;
4155        }
4156    }
4157
4158    if (mChannelHandle) {
4159        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4160                    mChannelHandle);
4161        if (rc < 0) {
4162            LOGE("start_channel failed");
4163            pthread_mutex_unlock(&mMutex);
4164            return rc;
4165        }
4166    }
4167
4168    pthread_mutex_unlock(&mMutex);
4169
4170    return 0;
4171}
4172
4173/*===========================================================================
4174 * FUNCTION   : flushPerf
4175 *
4176 * DESCRIPTION: This is the performance optimization version of flush that does
4177 *              not use stream off, rather flushes the system
4178 *
4179 * PARAMETERS :
4180 *
4181 *
4182 * RETURN     : 0 : success
4183 *              -EINVAL: input is malformed (device is not valid)
4184 *              -ENODEV: if the device has encountered a serious error
4185 *==========================================================================*/
4186int QCamera3HardwareInterface::flushPerf()
4187{
4188    ATRACE_CALL();
4189    int32_t rc = 0;
4190    struct timespec timeout;
4191    bool timed_wait = false;
4192
4193    pthread_mutex_lock(&mMutex);
4194    mFlushPerf = true;
4195    mPendingBuffersMap.numPendingBufsAtFlush =
4196        mPendingBuffersMap.get_num_overall_buffers();
4197    LOGD("Calling flush. Wait for %d buffers to return",
4198        mPendingBuffersMap.numPendingBufsAtFlush);
4199
4200    /* send the flush event to the backend */
4201    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4202    if (rc < 0) {
4203        LOGE("Error in flush: IOCTL failure");
4204        mFlushPerf = false;
4205        pthread_mutex_unlock(&mMutex);
4206        return -ENODEV;
4207    }
4208
4209    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
4210        LOGD("No pending buffers in HAL, return flush");
4211        mFlushPerf = false;
4212        pthread_mutex_unlock(&mMutex);
4213        return rc;
4214    }
4215
4216    /* wait on a signal that buffers were received */
4217    rc = clock_gettime(CLOCK_REALTIME, &timeout);
4218    if (rc < 0) {
4219        LOGE("Error reading the real time clock, cannot use timed wait");
4220    } else {
4221        timeout.tv_sec += FLUSH_TIMEOUT;
4222        timed_wait = true;
4223    }
4224
4225    //Block on conditional variable
4226    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
4227        LOGD("Waiting on mBuffersCond");
4228        if (!timed_wait) {
4229            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4230            if (rc != 0) {
4231                 LOGE("pthread_cond_wait failed due to rc = %s",
4232                        strerror(rc));
4233                 break;
4234            }
4235        } else {
4236            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4237            if (rc != 0) {
4238                LOGE("pthread_cond_timedwait failed due to rc = %s",
4239                            strerror(rc));
4240                break;
4241            }
4242        }
4243    }
4244    if (rc != 0) {
4245        mFlushPerf = false;
4246        pthread_mutex_unlock(&mMutex);
4247        return -ENODEV;
4248    }
4249
4250    LOGD("Received buffers, now safe to return them");
4251
4252    //make sure the channels handle flush
4253    //currently only required for the picture channel to release snapshot resources
4254    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4255            it != mStreamInfo.end(); it++) {
4256        QCamera3Channel *channel = (*it)->channel;
4257        if (channel) {
4258            rc = channel->flush();
4259            if (rc) {
4260               LOGE("Flushing the channels failed with error %d", rc);
4261               // even though the channel flush failed we need to continue and
4262               // return the buffers we have to the framework, however the return
4263               // value will be an error
4264               rc = -ENODEV;
4265            }
4266        }
4267    }
4268
4269    /* notify the frameworks and send errored results */
4270    rc = notifyErrorForPendingRequests();
4271    if (rc < 0) {
4272        LOGE("notifyErrorForPendingRequests failed");
4273        pthread_mutex_unlock(&mMutex);
4274        return rc;
4275    }
4276
4277    //unblock process_capture_request
4278    mPendingLiveRequest = 0;
4279    unblockRequestIfNecessary();
4280
4281    mFlushPerf = false;
4282    pthread_mutex_unlock(&mMutex);
4283    LOGD ("Flush Operation complete. rc = %d", rc);
4284    return rc;
4285}
4286
4287/*===========================================================================
4288 * FUNCTION   : handleCameraDeviceError
4289 *
4290 * DESCRIPTION: This function calls internal flush and notifies the error to
4291 *              framework and updates the state variable.
4292 *
4293 * PARAMETERS : None
4294 *
4295 * RETURN     : NO_ERROR on Success
4296 *              Error code on failure
4297 *==========================================================================*/
4298int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4299{
4300    int32_t rc = NO_ERROR;
4301
4302    pthread_mutex_lock(&mMutex);
4303    if (mState != ERROR) {
4304        //if mState != ERROR, nothing to be done
4305        pthread_mutex_unlock(&mMutex);
4306        return NO_ERROR;
4307    }
4308    pthread_mutex_unlock(&mMutex);
4309
4310    rc = flush(false /* restart channels */);
4311    if (NO_ERROR != rc) {
4312        LOGE("internal flush to handle mState = ERROR failed");
4313    }
4314
4315    pthread_mutex_lock(&mMutex);
4316    mState = DEINIT;
4317    pthread_mutex_unlock(&mMutex);
4318
4319    camera3_notify_msg_t notify_msg;
4320    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4321    notify_msg.type = CAMERA3_MSG_ERROR;
4322    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4323    notify_msg.message.error.error_stream = NULL;
4324    notify_msg.message.error.frame_number = 0;
4325    mCallbackOps->notify(mCallbackOps, &notify_msg);
4326
4327    return rc;
4328}
4329
4330/*===========================================================================
4331 * FUNCTION   : captureResultCb
4332 *
4333 * DESCRIPTION: Callback handler for all capture result
4334 *              (streams, as well as metadata)
4335 *
4336 * PARAMETERS :
4337 *   @metadata : metadata information
4338 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4339 *               NULL if metadata.
4340 *
4341 * RETURN     : NONE
4342 *==========================================================================*/
4343void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4344                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4345{
4346    if (metadata_buf) {
4347        if (mBatchSize) {
4348            handleBatchMetadata(metadata_buf,
4349                    true /* free_and_bufdone_meta_buf */);
4350        } else { /* mBatchSize = 0 */
4351            hdrPlusPerfLock(metadata_buf);
4352            pthread_mutex_lock(&mMutex);
4353            handleMetadataWithLock(metadata_buf,
4354                    true /* free_and_bufdone_meta_buf */);
4355            pthread_mutex_unlock(&mMutex);
4356        }
4357    } else if (isInputBuffer) {
4358        pthread_mutex_lock(&mMutex);
4359        handleInputBufferWithLock(frame_number);
4360        pthread_mutex_unlock(&mMutex);
4361    } else {
4362        pthread_mutex_lock(&mMutex);
4363        handleBufferWithLock(buffer, frame_number);
4364        pthread_mutex_unlock(&mMutex);
4365    }
4366    return;
4367}
4368
4369/*===========================================================================
4370 * FUNCTION   : getReprocessibleOutputStreamId
4371 *
4372 * DESCRIPTION: Get source output stream id for the input reprocess stream
4373 *              based on size and format, which would be the largest
4374 *              output stream if an input stream exists.
4375 *
4376 * PARAMETERS :
4377 *   @id      : return the stream id if found
4378 *
4379 * RETURN     : int32_t type of status
4380 *              NO_ERROR  -- success
4381 *              none-zero failure code
4382 *==========================================================================*/
4383int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4384{
4385    /* check if any output or bidirectional stream with the same size and format
4386       and return that stream */
4387    if ((mInputStreamInfo.dim.width > 0) &&
4388            (mInputStreamInfo.dim.height > 0)) {
4389        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4390                it != mStreamInfo.end(); it++) {
4391
4392            camera3_stream_t *stream = (*it)->stream;
4393            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4394                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4395                    (stream->format == mInputStreamInfo.format)) {
4396                // Usage flag for an input stream and the source output stream
4397                // may be different.
4398                LOGD("Found reprocessible output stream! %p", *it);
4399                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4400                         stream->usage, mInputStreamInfo.usage);
4401
4402                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4403                if (channel != NULL && channel->mStreams[0]) {
4404                    id = channel->mStreams[0]->getMyServerID();
4405                    return NO_ERROR;
4406                }
4407            }
4408        }
4409    } else {
4410        LOGD("No input stream, so no reprocessible output stream");
4411    }
4412    return NAME_NOT_FOUND;
4413}
4414
4415/*===========================================================================
4416 * FUNCTION   : lookupFwkName
4417 *
4418 * DESCRIPTION: In case the enum is not same in fwk and backend
4419 *              make sure the parameter is correctly propogated
4420 *
4421 * PARAMETERS  :
4422 *   @arr      : map between the two enums
4423 *   @len      : len of the map
4424 *   @hal_name : name of the hal_parm to map
4425 *
4426 * RETURN     : int type of status
4427 *              fwk_name  -- success
4428 *              none-zero failure code
4429 *==========================================================================*/
4430template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4431        size_t len, halType hal_name)
4432{
4433
4434    for (size_t i = 0; i < len; i++) {
4435        if (arr[i].hal_name == hal_name) {
4436            return arr[i].fwk_name;
4437        }
4438    }
4439
4440    /* Not able to find matching framework type is not necessarily
4441     * an error case. This happens when mm-camera supports more attributes
4442     * than the frameworks do */
4443    LOGH("Cannot find matching framework type");
4444    return NAME_NOT_FOUND;
4445}
4446
4447/*===========================================================================
4448 * FUNCTION   : lookupHalName
4449 *
 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @fwk_name : name of the fwk_parm to map
4457 *
4458 * RETURN     : int32_t type of status
4459 *              hal_name  -- success
4460 *              none-zero failure code
4461 *==========================================================================*/
4462template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4463        size_t len, fwkType fwk_name)
4464{
4465    for (size_t i = 0; i < len; i++) {
4466        if (arr[i].fwk_name == fwk_name) {
4467            return arr[i].hal_name;
4468        }
4469    }
4470
4471    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4472    return NAME_NOT_FOUND;
4473}
4474
4475/*===========================================================================
4476 * FUNCTION   : lookupProp
4477 *
4478 * DESCRIPTION: lookup a value by its name
4479 *
4480 * PARAMETERS :
4481 *   @arr     : map between the two enums
4482 *   @len     : size of the map
4483 *   @name    : name to be looked up
4484 *
4485 * RETURN     : Value if found
4486 *              CAM_CDS_MODE_MAX if not found
4487 *==========================================================================*/
4488template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4489        size_t len, const char *name)
4490{
4491    if (name) {
4492        for (size_t i = 0; i < len; i++) {
4493            if (!strcmp(arr[i].desc, name)) {
4494                return arr[i].val;
4495            }
4496        }
4497    }
4498    return CAM_CDS_MODE_MAX;
4499}
4500
4501/*===========================================================================
4502 *
4503 * DESCRIPTION:
4504 *
4505 * PARAMETERS :
4506 *   @metadata : metadata information from callback
4507 *   @timestamp: metadata buffer timestamp
4508 *   @request_id: request id
4509 *   @jpegMetadata: additional jpeg metadata
4510 *   @hybrid_ae_enable: whether hybrid ae is enabled
4511 *   @pprocDone: whether internal offline postprocsesing is done
4512 *
4513 * RETURN     : camera_metadata_t*
4514 *              metadata in a format specified by fwk
4515 *==========================================================================*/
4516camera_metadata_t*
4517QCamera3HardwareInterface::translateFromHalMetadata(
4518                                 metadata_buffer_t *metadata,
4519                                 nsecs_t timestamp,
4520                                 int32_t request_id,
4521                                 const CameraMetadata& jpegMetadata,
4522                                 uint8_t pipeline_depth,
4523                                 uint8_t capture_intent,
4524                                 uint8_t hybrid_ae_enable,
4525                                 bool pprocDone,
4526                                 uint8_t fwk_cacMode)
4527{
4528    CameraMetadata camMetadata;
4529    camera_metadata_t *resultMetadata;
4530
4531    if (jpegMetadata.entryCount())
4532        camMetadata.append(jpegMetadata);
4533
4534    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4535    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4536    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4537    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4538    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4539
4540    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4541        int64_t fwk_frame_number = *frame_number;
4542        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4543    }
4544
4545    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4546        int32_t fps_range[2];
4547        fps_range[0] = (int32_t)float_range->min_fps;
4548        fps_range[1] = (int32_t)float_range->max_fps;
4549        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4550                                      fps_range, 2);
4551        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4552             fps_range[0], fps_range[1]);
4553    }
4554
4555    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4556        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4557    }
4558
4559    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4560        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4561                METADATA_MAP_SIZE(SCENE_MODES_MAP),
4562                *sceneMode);
4563        if (NAME_NOT_FOUND != val) {
4564            uint8_t fwkSceneMode = (uint8_t)val;
4565            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4566            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4567                     fwkSceneMode);
4568        }
4569    }
4570
4571    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4572        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4573        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4574    }
4575
4576    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4577        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4578        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4579    }
4580
4581    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4582        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4583        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4584    }
4585
4586    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4587            CAM_INTF_META_EDGE_MODE, metadata) {
4588        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4589    }
4590
4591    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4592        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4593        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4594    }
4595
4596    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4597        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4598    }
4599
4600    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4601        if (0 <= *flashState) {
4602            uint8_t fwk_flashState = (uint8_t) *flashState;
4603            if (!gCamCapability[mCameraId]->flash_available) {
4604                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4605            }
4606            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4607        }
4608    }
4609
4610    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4611        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4612        if (NAME_NOT_FOUND != val) {
4613            uint8_t fwk_flashMode = (uint8_t)val;
4614            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4615        }
4616    }
4617
4618    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4619        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4620        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4621    }
4622
4623    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4624        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4625    }
4626
4627    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4628        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4629    }
4630
4631    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4632        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4633    }
4634
4635    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4636        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4637        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4638    }
4639
4640    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4641        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4642        LOGD("fwk_videoStab = %d", fwk_videoStab);
4643        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4644    } else {
4645        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
4646        // and so hardcoding the Video Stab result to OFF mode.
4647        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4648        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
4649        LOGD("%s: EIS result default to OFF mode", __func__);
4650    }
4651
4652    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4653        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4654        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4655    }
4656
4657    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4658        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4659    }
4660
4661    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4662        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4663
4664        LOGD("dynamicblackLevel = %f %f %f %f",
4665          blackLevelSourcePattern->cam_black_level[0],
4666          blackLevelSourcePattern->cam_black_level[1],
4667          blackLevelSourcePattern->cam_black_level[2],
4668          blackLevelSourcePattern->cam_black_level[3]);
4669    }
4670
4671    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4672        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4673        float fwk_blackLevelInd[4];
4674
4675        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4676        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4677        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4678        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4679
4680        LOGD("applied dynamicblackLevel = %f %f %f %f",
4681          blackLevelAppliedPattern->cam_black_level[0],
4682          blackLevelAppliedPattern->cam_black_level[1],
4683          blackLevelAppliedPattern->cam_black_level[2],
4684          blackLevelAppliedPattern->cam_black_level[3]);
4685        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4686
4687        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
4688        // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4689        // depth space.
4690        fwk_blackLevelInd[0] /= 64.0;
4691        fwk_blackLevelInd[1] /= 64.0;
4692        fwk_blackLevelInd[2] /= 64.0;
4693        fwk_blackLevelInd[3] /= 64.0;
4694        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4695    }
4696
4697    // Fixed whitelevel is used by ISP/Sensor
4698    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4699            &gCamCapability[mCameraId]->white_level, 1);
4700
4701    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4702            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4703        int32_t scalerCropRegion[4];
4704        scalerCropRegion[0] = hScalerCropRegion->left;
4705        scalerCropRegion[1] = hScalerCropRegion->top;
4706        scalerCropRegion[2] = hScalerCropRegion->width;
4707        scalerCropRegion[3] = hScalerCropRegion->height;
4708
4709        // Adjust crop region from sensor output coordinate system to active
4710        // array coordinate system.
4711        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4712                scalerCropRegion[2], scalerCropRegion[3]);
4713
4714        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4715    }
4716
4717    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4718        LOGD("sensorExpTime = %lld", *sensorExpTime);
4719        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4720    }
4721
4722    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4723            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4724        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
4725        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4726    }
4727
4728    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4729            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4730        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
4731        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4732                sensorRollingShutterSkew, 1);
4733    }
4734
4735    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4736        LOGD("sensorSensitivity = %d", *sensorSensitivity);
4737        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4738
4739        //calculate the noise profile based on sensitivity
4740        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4741        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4742        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4743        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4744            noise_profile[i]   = noise_profile_S;
4745            noise_profile[i+1] = noise_profile_O;
4746        }
4747        LOGD("noise model entry (S, O) is (%f, %f)",
4748                noise_profile_S, noise_profile_O);
4749        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4750                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4751    }
4752
4753    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
4754        int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
4755        camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
4756    }
4757
4758    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4759        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4760        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4761    }
4762
4763    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4764        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4765                *faceDetectMode);
4766        if (NAME_NOT_FOUND != val) {
4767            uint8_t fwk_faceDetectMode = (uint8_t)val;
4768            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4769
4770            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4771                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4772                        CAM_INTF_META_FACE_DETECTION, metadata) {
4773                    uint8_t numFaces = MIN(
4774                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4775                    int32_t faceIds[MAX_ROI];
4776                    uint8_t faceScores[MAX_ROI];
4777                    int32_t faceRectangles[MAX_ROI * 4];
4778                    int32_t faceLandmarks[MAX_ROI * 6];
4779                    size_t j = 0, k = 0;
4780
4781                    for (size_t i = 0; i < numFaces; i++) {
4782                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4783                        // Adjust crop region from sensor output coordinate system to active
4784                        // array coordinate system.
4785                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4786                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4787                                rect.width, rect.height);
4788
4789                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4790                                faceRectangles+j, -1);
4791
4792                        j+= 4;
4793                    }
4794                    if (numFaces <= 0) {
4795                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4796                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4797                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4798                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4799                    }
4800
4801                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4802                            numFaces);
4803                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4804                            faceRectangles, numFaces * 4U);
4805                    if (fwk_faceDetectMode ==
4806                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4807                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
4808                                CAM_INTF_META_FACE_LANDMARK, metadata) {
4809
4810                            for (size_t i = 0; i < numFaces; i++) {
4811                                // Map the co-ordinate sensor output coordinate system to active
4812                                // array coordinate system.
4813                                mCropRegionMapper.toActiveArray(
4814                                        landmarks->face_landmarks[i].left_eye_center.x,
4815                                        landmarks->face_landmarks[i].left_eye_center.y);
4816                                mCropRegionMapper.toActiveArray(
4817                                        landmarks->face_landmarks[i].right_eye_center.x,
4818                                        landmarks->face_landmarks[i].right_eye_center.y);
4819                                mCropRegionMapper.toActiveArray(
4820                                        landmarks->face_landmarks[i].mouth_center.x,
4821                                        landmarks->face_landmarks[i].mouth_center.y);
4822
4823                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
4824                                k+= 6;
4825                            }
4826                        }
4827
4828                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4829                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4830                                faceLandmarks, numFaces * 6U);
4831                   }
4832                }
4833            }
4834        }
4835    }
4836
4837    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4838        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4839        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4840    }
4841
4842    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4843            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4844        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4845        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4846    }
4847
4848    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4849            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4850        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4851                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4852    }
4853
4854    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4855            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4856        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4857                CAM_MAX_SHADING_MAP_HEIGHT);
4858        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4859                CAM_MAX_SHADING_MAP_WIDTH);
4860        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4861                lensShadingMap->lens_shading, 4U * map_width * map_height);
4862    }
4863
4864    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4865        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4866        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4867    }
4868
4869    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4870        //Populate CAM_INTF_META_TONEMAP_CURVES
4871        /* ch0 = G, ch 1 = B, ch 2 = R*/
4872        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4873            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4874                     tonemap->tonemap_points_cnt,
4875                    CAM_MAX_TONEMAP_CURVE_SIZE);
4876            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4877        }
4878
4879        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4880                        &tonemap->curves[0].tonemap_points[0][0],
4881                        tonemap->tonemap_points_cnt * 2);
4882
4883        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4884                        &tonemap->curves[1].tonemap_points[0][0],
4885                        tonemap->tonemap_points_cnt * 2);
4886
4887        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4888                        &tonemap->curves[2].tonemap_points[0][0],
4889                        tonemap->tonemap_points_cnt * 2);
4890    }
4891
4892    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4893            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4894        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4895                CC_GAINS_COUNT);
4896    }
4897
4898    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4899            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4900        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4901                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4902                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4903    }
4904
4905    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4906            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4907        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4908            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4909                     toneCurve->tonemap_points_cnt,
4910                    CAM_MAX_TONEMAP_CURVE_SIZE);
4911            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4912        }
4913        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4914                (float*)toneCurve->curve.tonemap_points,
4915                toneCurve->tonemap_points_cnt * 2);
4916    }
4917
4918    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4919            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4920        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4921                predColorCorrectionGains->gains, 4);
4922    }
4923
4924    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4925            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4926        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4927                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4928                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4929    }
4930
4931    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4932        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4933    }
4934
4935    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4936        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4937        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4938    }
4939
4940    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4941        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4942        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4943    }
4944
4945    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4946        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4947                *effectMode);
4948        if (NAME_NOT_FOUND != val) {
4949            uint8_t fwk_effectMode = (uint8_t)val;
4950            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4951        }
4952    }
4953
4954    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4955            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4956        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4957                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4958        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4959            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4960        }
4961        int32_t fwk_testPatternData[4];
4962        fwk_testPatternData[0] = testPatternData->r;
4963        fwk_testPatternData[3] = testPatternData->b;
4964        switch (gCamCapability[mCameraId]->color_arrangement) {
4965        case CAM_FILTER_ARRANGEMENT_RGGB:
4966        case CAM_FILTER_ARRANGEMENT_GRBG:
4967            fwk_testPatternData[1] = testPatternData->gr;
4968            fwk_testPatternData[2] = testPatternData->gb;
4969            break;
4970        case CAM_FILTER_ARRANGEMENT_GBRG:
4971        case CAM_FILTER_ARRANGEMENT_BGGR:
4972            fwk_testPatternData[2] = testPatternData->gr;
4973            fwk_testPatternData[1] = testPatternData->gb;
4974            break;
4975        default:
4976            LOGE("color arrangement %d is not supported",
4977                gCamCapability[mCameraId]->color_arrangement);
4978            break;
4979        }
4980        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4981    }
4982
4983    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4984        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4985    }
4986
4987    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4988        String8 str((const char *)gps_methods);
4989        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4990    }
4991
4992    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4993        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4994    }
4995
4996    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4997        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4998    }
4999
5000    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5001        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5002        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5003    }
5004
5005    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5006        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5007        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5008    }
5009
5010    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5011        int32_t fwk_thumb_size[2];
5012        fwk_thumb_size[0] = thumb_size->width;
5013        fwk_thumb_size[1] = thumb_size->height;
5014        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5015    }
5016
5017    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5018        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5019                privateData,
5020                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5021    }
5022
5023    if (metadata->is_tuning_params_valid) {
5024        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5025        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5026        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5027
5028
5029        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5030                sizeof(uint32_t));
5031        data += sizeof(uint32_t);
5032
5033        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5034                sizeof(uint32_t));
5035        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5036        data += sizeof(uint32_t);
5037
5038        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5039                sizeof(uint32_t));
5040        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5041        data += sizeof(uint32_t);
5042
5043        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5044                sizeof(uint32_t));
5045        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5046        data += sizeof(uint32_t);
5047
5048        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5049                sizeof(uint32_t));
5050        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5051        data += sizeof(uint32_t);
5052
5053        metadata->tuning_params.tuning_mod3_data_size = 0;
5054        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5055                sizeof(uint32_t));
5056        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5057        data += sizeof(uint32_t);
5058
5059        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5060                TUNING_SENSOR_DATA_MAX);
5061        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5062                count);
5063        data += count;
5064
5065        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5066                TUNING_VFE_DATA_MAX);
5067        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5068                count);
5069        data += count;
5070
5071        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5072                TUNING_CPP_DATA_MAX);
5073        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5074                count);
5075        data += count;
5076
5077        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5078                TUNING_CAC_DATA_MAX);
5079        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5080                count);
5081        data += count;
5082
5083        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5084                (int32_t *)(void *)tuning_meta_data_blob,
5085                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5086    }
5087
5088    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5089            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5090        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5091                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5092                NEUTRAL_COL_POINTS);
5093    }
5094
5095    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5096        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5097        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5098    }
5099
5100    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5101        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5102        // Adjust crop region from sensor output coordinate system to active
5103        // array coordinate system.
5104        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5105                hAeRegions->rect.width, hAeRegions->rect.height);
5106
5107        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5108        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5109                REGIONS_TUPLE_COUNT);
5110        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5111                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5112                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5113                hAeRegions->rect.height);
5114    }
5115
5116    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5117        uint8_t fwk_afState = (uint8_t) *afState;
5118        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5119        LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5120    }
5121
5122    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5123        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5124    }
5125
5126    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5127        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5128    }
5129
5130    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5131        uint8_t fwk_lensState = *lensState;
5132        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5133    }
5134
5135    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5136        /*af regions*/
5137        int32_t afRegions[REGIONS_TUPLE_COUNT];
5138        // Adjust crop region from sensor output coordinate system to active
5139        // array coordinate system.
5140        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5141                hAfRegions->rect.width, hAfRegions->rect.height);
5142
5143        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5144        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5145                REGIONS_TUPLE_COUNT);
5146        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5147                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5148                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5149                hAfRegions->rect.height);
5150    }
5151
5152    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5153        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5154                *hal_ab_mode);
5155        if (NAME_NOT_FOUND != val) {
5156            uint8_t fwk_ab_mode = (uint8_t)val;
5157            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5158        }
5159    }
5160
5161    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5162        int val = lookupFwkName(SCENE_MODES_MAP,
5163                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5164        if (NAME_NOT_FOUND != val) {
5165            uint8_t fwkBestshotMode = (uint8_t)val;
5166            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5167            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5168        } else {
5169            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5170        }
5171    }
5172
5173    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5174         uint8_t fwk_mode = (uint8_t) *mode;
5175         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5176    }
5177
5178    /* Constant metadata values to be update*/
5179    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5180    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5181
5182    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5183    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5184
5185    int32_t hotPixelMap[2];
5186    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5187
5188    // CDS
5189    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5190        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5191    }
5192
5193    // TNR
5194    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5195        uint8_t tnr_enable       = tnr->denoise_enable;
5196        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5197
5198        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5199        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5200    }
5201
5202    // Reprocess crop data
5203    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5204        uint8_t cnt = crop_data->num_of_streams;
5205        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5206            // mm-qcamera-daemon only posts crop_data for streams
5207            // not linked to pproc. So no valid crop metadata is not
5208            // necessarily an error case.
5209            LOGD("No valid crop metadata entries");
5210        } else {
5211            uint32_t reproc_stream_id;
5212            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5213                LOGD("No reprocessible stream found, ignore crop data");
5214            } else {
5215                int rc = NO_ERROR;
5216                Vector<int32_t> roi_map;
5217                int32_t *crop = new int32_t[cnt*4];
5218                if (NULL == crop) {
5219                   rc = NO_MEMORY;
5220                }
5221                if (NO_ERROR == rc) {
5222                    int32_t streams_found = 0;
5223                    for (size_t i = 0; i < cnt; i++) {
5224                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5225                            if (pprocDone) {
5226                                // HAL already does internal reprocessing,
5227                                // either via reprocessing before JPEG encoding,
5228                                // or offline postprocessing for pproc bypass case.
5229                                crop[0] = 0;
5230                                crop[1] = 0;
5231                                crop[2] = mInputStreamInfo.dim.width;
5232                                crop[3] = mInputStreamInfo.dim.height;
5233                            } else {
5234                                crop[0] = crop_data->crop_info[i].crop.left;
5235                                crop[1] = crop_data->crop_info[i].crop.top;
5236                                crop[2] = crop_data->crop_info[i].crop.width;
5237                                crop[3] = crop_data->crop_info[i].crop.height;
5238                            }
5239                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5240                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5241                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5242                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5243                            streams_found++;
5244                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5245                                    crop[0], crop[1], crop[2], crop[3]);
5246                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5247                                    crop_data->crop_info[i].roi_map.left,
5248                                    crop_data->crop_info[i].roi_map.top,
5249                                    crop_data->crop_info[i].roi_map.width,
5250                                    crop_data->crop_info[i].roi_map.height);
5251                            break;
5252
5253                       }
5254                    }
5255                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5256                            &streams_found, 1);
5257                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5258                            crop, (size_t)(streams_found * 4));
5259                    if (roi_map.array()) {
5260                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5261                                roi_map.array(), roi_map.size());
5262                    }
5263               }
5264               if (crop) {
5265                   delete [] crop;
5266               }
5267            }
5268        }
5269    }
5270
5271    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5272        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5273        // so hardcoding the CAC result to OFF mode.
5274        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5275        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5276    } else {
5277        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5278            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5279                    *cacMode);
5280            if (NAME_NOT_FOUND != val) {
5281                uint8_t resultCacMode = (uint8_t)val;
5282                // check whether CAC result from CB is equal to Framework set CAC mode
5283                // If not equal then set the CAC mode came in corresponding request
5284                if (fwk_cacMode != resultCacMode) {
5285                    resultCacMode = fwk_cacMode;
5286                }
5287                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5288                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5289            } else {
5290                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5291            }
5292        }
5293    }
5294
5295    // Post blob of cam_cds_data through vendor tag.
5296    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5297        uint8_t cnt = cdsInfo->num_of_streams;
5298        cam_cds_data_t cdsDataOverride;
5299        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5300        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5301        cdsDataOverride.num_of_streams = 1;
5302        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5303            uint32_t reproc_stream_id;
5304            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5305                LOGD("No reprocessible stream found, ignore cds data");
5306            } else {
5307                for (size_t i = 0; i < cnt; i++) {
5308                    if (cdsInfo->cds_info[i].stream_id ==
5309                            reproc_stream_id) {
5310                        cdsDataOverride.cds_info[0].cds_enable =
5311                                cdsInfo->cds_info[i].cds_enable;
5312                        break;
5313                    }
5314                }
5315            }
5316        } else {
5317            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5318        }
5319        camMetadata.update(QCAMERA3_CDS_INFO,
5320                (uint8_t *)&cdsDataOverride,
5321                sizeof(cam_cds_data_t));
5322    }
5323
5324    // Ldaf calibration data
5325    if (!mLdafCalibExist) {
5326        IF_META_AVAILABLE(uint32_t, ldafCalib,
5327                CAM_INTF_META_LDAF_EXIF, metadata) {
5328            mLdafCalibExist = true;
5329            mLdafCalib[0] = ldafCalib[0];
5330            mLdafCalib[1] = ldafCalib[1];
5331            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5332                    ldafCalib[0], ldafCalib[1]);
5333        }
5334    }
5335
5336    resultMetadata = camMetadata.release();
5337    return resultMetadata;
5338}
5339
5340/*===========================================================================
5341 * FUNCTION   : saveExifParams
5342 *
5343 * DESCRIPTION:
5344 *
5345 * PARAMETERS :
5346 *   @metadata : metadata information from callback
5347 *
5348 * RETURN     : none
5349 *
5350 *==========================================================================*/
5351void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5352{
5353    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5354            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5355        if (mExifParams.debug_params) {
5356            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
5357            mExifParams.debug_params->ae_debug_params_valid = TRUE;
5358        }
5359    }
5360    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5361            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5362        if (mExifParams.debug_params) {
5363            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
5364            mExifParams.debug_params->awb_debug_params_valid = TRUE;
5365        }
5366    }
5367    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5368            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5369        if (mExifParams.debug_params) {
5370            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
5371            mExifParams.debug_params->af_debug_params_valid = TRUE;
5372        }
5373    }
5374    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5375            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5376        if (mExifParams.debug_params) {
5377            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
5378            mExifParams.debug_params->asd_debug_params_valid = TRUE;
5379        }
5380    }
5381    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5382            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5383        if (mExifParams.debug_params) {
5384            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
5385            mExifParams.debug_params->stats_debug_params_valid = TRUE;
5386        }
5387    }
5388}
5389
5390/*===========================================================================
5391 * FUNCTION   : get3AExifParams
5392 *
5393 * DESCRIPTION:
5394 *
5395 * PARAMETERS : none
5396 *
5397 *
5398 * RETURN     : mm_jpeg_exif_params_t
5399 *
5400 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns a shallow copy of the cached EXIF parameters; the embedded
    // debug_params pointer (filled by saveExifParams) is shared with the
    // HAL-owned original.
    return mExifParams;
}
5405
5406/*===========================================================================
5407 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5408 *
5409 * DESCRIPTION:
5410 *
5411 * PARAMETERS :
5412 *   @metadata : metadata information from callback
5413 *
5414 * RETURN     : camera_metadata_t*
5415 *              metadata in a format specified by fwk
5416 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: forward the HAL value, narrowed to the framework's uint8_t enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are reported back unchanged.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state: forward the HAL value, narrowed to the framework's uint8_t enum.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: translate via FOCUS_MODES_MAP; the tag is skipped entirely
    // when the HAL value has no framework equivalent.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger and its id are reported back unchanged.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: translate via WHITE_BALANCE_MODES_MAP; skipped when unmapped.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart; it is deduced
    // from three optional HAL fields with the following precedence:
    //   1. red-eye reduction enabled -> ON_AUTO_FLASH_REDEYE
    //   2. flash mode AUTO/ON        -> mapped via AE_FLASH_MODE_MAP
    //   3. AE on                     -> ON
    //   4. AE off                    -> OFF
    // Fields left at their sentinel (MAX / -1) were absent from the callback.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the inputs were usable; AE mode is left out of the result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Hand ownership of the raw buffer back to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5523
5524/*===========================================================================
5525 * FUNCTION   : dumpMetadataToFile
5526 *
5527 * DESCRIPTION: Dumps tuning metadata to file system
5528 *
5529 * PARAMETERS :
5530 *   @meta           : tuning metadata
5531 *   @dumpFrameCount : current dump frame count
5532 *   @enabled        : Enable mask
5533 *
5534 *==========================================================================*/
5535void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5536                                                   uint32_t &dumpFrameCount,
5537                                                   bool enabled,
5538                                                   const char *type,
5539                                                   uint32_t frameNumber)
5540{
5541    //Some sanity checks
5542    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5543        LOGE("Tuning sensor data size bigger than expected %d: %d",
5544              meta.tuning_sensor_data_size,
5545              TUNING_SENSOR_DATA_MAX);
5546        return;
5547    }
5548
5549    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5550        LOGE("Tuning VFE data size bigger than expected %d: %d",
5551              meta.tuning_vfe_data_size,
5552              TUNING_VFE_DATA_MAX);
5553        return;
5554    }
5555
5556    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5557        LOGE("Tuning CPP data size bigger than expected %d: %d",
5558              meta.tuning_cpp_data_size,
5559              TUNING_CPP_DATA_MAX);
5560        return;
5561    }
5562
5563    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5564        LOGE("Tuning CAC data size bigger than expected %d: %d",
5565              meta.tuning_cac_data_size,
5566              TUNING_CAC_DATA_MAX);
5567        return;
5568    }
5569    //
5570
5571    if(enabled){
5572        char timeBuf[FILENAME_MAX];
5573        char buf[FILENAME_MAX];
5574        memset(buf, 0, sizeof(buf));
5575        memset(timeBuf, 0, sizeof(timeBuf));
5576        time_t current_time;
5577        struct tm * timeinfo;
5578        time (&current_time);
5579        timeinfo = localtime (&current_time);
5580        if (timeinfo != NULL) {
5581            strftime (timeBuf, sizeof(timeBuf),
5582                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5583        }
5584        String8 filePath(timeBuf);
5585        snprintf(buf,
5586                sizeof(buf),
5587                "%dm_%s_%d.bin",
5588                dumpFrameCount,
5589                type,
5590                frameNumber);
5591        filePath.append(buf);
5592        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5593        if (file_fd >= 0) {
5594            ssize_t written_len = 0;
5595            meta.tuning_data_version = TUNING_DATA_VERSION;
5596            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5597            written_len += write(file_fd, data, sizeof(uint32_t));
5598            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5599            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5600            written_len += write(file_fd, data, sizeof(uint32_t));
5601            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5602            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5603            written_len += write(file_fd, data, sizeof(uint32_t));
5604            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5605            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5606            written_len += write(file_fd, data, sizeof(uint32_t));
5607            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5608            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5609            written_len += write(file_fd, data, sizeof(uint32_t));
5610            meta.tuning_mod3_data_size = 0;
5611            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5612            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5613            written_len += write(file_fd, data, sizeof(uint32_t));
5614            size_t total_size = meta.tuning_sensor_data_size;
5615            data = (void *)((uint8_t *)&meta.data);
5616            written_len += write(file_fd, data, total_size);
5617            total_size = meta.tuning_vfe_data_size;
5618            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5619            written_len += write(file_fd, data, total_size);
5620            total_size = meta.tuning_cpp_data_size;
5621            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5622            written_len += write(file_fd, data, total_size);
5623            total_size = meta.tuning_cac_data_size;
5624            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5625            written_len += write(file_fd, data, total_size);
5626            close(file_fd);
5627        }else {
5628            LOGE("fail to open file for metadata dumping");
5629        }
5630    }
5631}
5632
5633/*===========================================================================
5634 * FUNCTION   : cleanAndSortStreamInfo
5635 *
5636 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5637 *              and sort them such that raw stream is at the end of the list
5638 *              This is a workaround for camera daemon constraint.
5639 *
5640 * PARAMETERS : None
5641 *
5642 *==========================================================================*/
5643void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5644{
5645    List<stream_info_t *> newStreamInfo;
5646
5647    /*clean up invalid streams*/
5648    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5649            it != mStreamInfo.end();) {
5650        if(((*it)->status) == INVALID){
5651            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5652            delete channel;
5653            free(*it);
5654            it = mStreamInfo.erase(it);
5655        } else {
5656            it++;
5657        }
5658    }
5659
5660    // Move preview/video/callback/snapshot streams into newList
5661    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5662            it != mStreamInfo.end();) {
5663        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5664                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5665                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5666            newStreamInfo.push_back(*it);
5667            it = mStreamInfo.erase(it);
5668        } else
5669            it++;
5670    }
5671    // Move raw streams into newList
5672    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5673            it != mStreamInfo.end();) {
5674        newStreamInfo.push_back(*it);
5675        it = mStreamInfo.erase(it);
5676    }
5677
5678    mStreamInfo = newStreamInfo;
5679}
5680
5681/*===========================================================================
5682 * FUNCTION   : extractJpegMetadata
5683 *
5684 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5685 *              JPEG metadata is cached in HAL, and return as part of capture
5686 *              result when metadata is returned from camera daemon.
5687 *
5688 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5689 *              @request:      capture request
5690 *
5691 *==========================================================================*/
5692void QCamera3HardwareInterface::extractJpegMetadata(
5693        CameraMetadata& jpegMetadata,
5694        const camera3_capture_request_t *request)
5695{
5696    CameraMetadata frame_settings;
5697    frame_settings = request->settings;
5698
5699    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5700        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5701                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5702                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5703
5704    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5705        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5706                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5707                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5708
5709    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5710        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5711                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5712                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5713
5714    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5715        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5716                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5717                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5718
5719    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5720        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5721                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5722                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5723
5724    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5725        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5726                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5727                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5728
5729    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5730        int32_t thumbnail_size[2];
5731        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5732        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5733        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5734            int32_t orientation =
5735                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5736            if ((orientation == 90) || (orientation == 270)) {
5737               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5738               int32_t temp;
5739               temp = thumbnail_size[0];
5740               thumbnail_size[0] = thumbnail_size[1];
5741               thumbnail_size[1] = temp;
5742            }
5743         }
5744         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5745                thumbnail_size,
5746                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5747    }
5748
5749}
5750
5751/*===========================================================================
5752 * FUNCTION   : convertToRegions
5753 *
5754 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5755 *
5756 * PARAMETERS :
5757 *   @rect   : cam_rect_t struct to convert
5758 *   @region : int32_t destination array
5759 *   @weight : if we are converting from cam_area_t, weight is valid
5760 *             else weight = -1
5761 *
5762 *==========================================================================*/
5763void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5764        int32_t *region, int weight)
5765{
5766    region[0] = rect.left;
5767    region[1] = rect.top;
5768    region[2] = rect.left + rect.width;
5769    region[3] = rect.top + rect.height;
5770    if (weight > -1) {
5771        region[4] = weight;
5772    }
5773}
5774
5775/*===========================================================================
5776 * FUNCTION   : convertFromRegions
5777 *
5778 * DESCRIPTION: helper method to convert from array to cam_rect_t
5779 *
5780 * PARAMETERS :
5781 *   @rect   : cam_rect_t struct to convert
5782 *   @region : int32_t destination array
5783 *   @weight : if we are converting from cam_area_t, weight is valid
5784 *             else weight = -1
5785 *
5786 *==========================================================================*/
5787void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5788        const camera_metadata_t *settings, uint32_t tag)
5789{
5790    CameraMetadata frame_settings;
5791    frame_settings = settings;
5792    int32_t x_min = frame_settings.find(tag).data.i32[0];
5793    int32_t y_min = frame_settings.find(tag).data.i32[1];
5794    int32_t x_max = frame_settings.find(tag).data.i32[2];
5795    int32_t y_max = frame_settings.find(tag).data.i32[3];
5796    roi.weight = frame_settings.find(tag).data.i32[4];
5797    roi.rect.left = x_min;
5798    roi.rect.top = y_min;
5799    roi.rect.width = x_max - x_min;
5800    roi.rect.height = y_max - y_min;
5801}
5802
5803/*===========================================================================
5804 * FUNCTION   : resetIfNeededROI
5805 *
5806 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5807 *              crop region
5808 *
5809 * PARAMETERS :
5810 *   @roi       : cam_area_t struct to resize
5811 *   @scalerCropRegion : cam_crop_region_t region to compare against
5812 *
5813 *
5814 *==========================================================================*/
5815bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5816                                                 const cam_crop_region_t* scalerCropRegion)
5817{
5818    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5819    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5820    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5821    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5822
5823    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5824     * without having this check the calculations below to validate if the roi
5825     * is inside scalar crop region will fail resulting in the roi not being
5826     * reset causing algorithm to continue to use stale roi window
5827     */
5828    if (roi->weight == 0) {
5829        return true;
5830    }
5831
5832    if ((roi_x_max < scalerCropRegion->left) ||
5833        // right edge of roi window is left of scalar crop's left edge
5834        (roi_y_max < scalerCropRegion->top)  ||
5835        // bottom edge of roi window is above scalar crop's top edge
5836        (roi->rect.left > crop_x_max) ||
5837        // left edge of roi window is beyond(right) of scalar crop's right edge
5838        (roi->rect.top > crop_y_max)){
5839        // top edge of roi windo is above scalar crop's top edge
5840        return false;
5841    }
5842    if (roi->rect.left < scalerCropRegion->left) {
5843        roi->rect.left = scalerCropRegion->left;
5844    }
5845    if (roi->rect.top < scalerCropRegion->top) {
5846        roi->rect.top = scalerCropRegion->top;
5847    }
5848    if (roi_x_max > crop_x_max) {
5849        roi_x_max = crop_x_max;
5850    }
5851    if (roi_y_max > crop_y_max) {
5852        roi_y_max = crop_y_max;
5853    }
5854    roi->rect.width = roi_x_max - roi->rect.left;
5855    roi->rect.height = roi_y_max - roi->rect.top;
5856    return true;
5857}
5858
5859/*===========================================================================
5860 * FUNCTION   : convertLandmarks
5861 *
5862 * DESCRIPTION: helper method to extract the landmarks from face detection info
5863 *
5864 * PARAMETERS :
5865 *   @landmark_data : input landmark data to be converted
5866 *   @landmarks : int32_t destination array
5867 *
5868 *
5869 *==========================================================================*/
5870void QCamera3HardwareInterface::convertLandmarks(
5871        cam_face_landmarks_info_t landmark_data,
5872        int32_t *landmarks)
5873{
5874    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
5875    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
5876    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
5877    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
5878    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
5879    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
5880}
5881
5882#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5883/*===========================================================================
5884 * FUNCTION   : initCapabilities
5885 *
5886 * DESCRIPTION: initialize camera capabilities in static data struct
5887 *
5888 * PARAMETERS :
5889 *   @cameraId  : camera Id
5890 *
5891 * RETURN     : int32_t type of status
5892 *              NO_ERROR  -- success
5893 *              none-zero failure code
5894 *==========================================================================*/
5895int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5896{
5897    int rc = 0;
5898    mm_camera_vtbl_t *cameraHandle = NULL;
5899    QCamera3HeapMemory *capabilityHeap = NULL;
5900
5901    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5902    if (rc) {
5903        LOGE("camera_open failed. rc = %d", rc);
5904        goto open_failed;
5905    }
5906    if (!cameraHandle) {
5907        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
5908        goto open_failed;
5909    }
5910
5911    capabilityHeap = new QCamera3HeapMemory(1);
5912    if (capabilityHeap == NULL) {
5913        LOGE("creation of capabilityHeap failed");
5914        goto heap_creation_failed;
5915    }
5916    /* Allocate memory for capability buffer */
5917    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5918    if(rc != OK) {
5919        LOGE("No memory for cappability");
5920        goto allocate_failed;
5921    }
5922
5923    /* Map memory for capability buffer */
5924    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5925    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5926                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5927                                capabilityHeap->getFd(0),
5928                                sizeof(cam_capability_t));
5929    if(rc < 0) {
5930        LOGE("failed to map capability buffer");
5931        goto map_failed;
5932    }
5933
5934    /* Query Capability */
5935    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5936    if(rc < 0) {
5937        LOGE("failed to query capability");
5938        goto query_failed;
5939    }
5940    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5941    if (!gCamCapability[cameraId]) {
5942        LOGE("out of memory");
5943        goto query_failed;
5944    }
5945    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5946                                        sizeof(cam_capability_t));
5947    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_x = 0;
5948    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_y = 0;
5949    rc = 0;
5950
5951query_failed:
5952    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5953                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5954map_failed:
5955    capabilityHeap->deallocate();
5956allocate_failed:
5957    delete capabilityHeap;
5958heap_creation_failed:
5959    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5960    cameraHandle = NULL;
5961open_failed:
5962    return rc;
5963}
5964
5965/*==========================================================================
 * FUNCTION   : get3AVersion
5967 *
5968 * DESCRIPTION: get the Q3A S/W version
5969 *
5970 * PARAMETERS :
5971 *  @sw_version: Reference of Q3A structure which will hold version info upon
5972 *               return
5973 *
5974 * RETURN     : None
5975 *
5976 *==========================================================================*/
5977void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5978{
5979    if(gCamCapability[mCameraId])
5980        sw_version = gCamCapability[mCameraId]->q3a_version;
5981    else
5982        LOGE("Capability structure NULL!");
5983}
5984
5985
5986/*===========================================================================
5987 * FUNCTION   : initParameters
5988 *
5989 * DESCRIPTION: initialize camera parameters
5990 *
5991 * PARAMETERS :
5992 *
5993 * RETURN     : int32_t type of status
5994 *              NO_ERROR  -- success
5995 *              none-zero failure code
5996 *==========================================================================*/
5997int QCamera3HardwareInterface::initParameters()
5998{
5999    int rc = 0;
6000
6001    //Allocate Set Param Buffer
6002    mParamHeap = new QCamera3HeapMemory(1);
6003    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6004    if(rc != OK) {
6005        rc = NO_MEMORY;
6006        LOGE("Failed to allocate SETPARM Heap memory");
6007        delete mParamHeap;
6008        mParamHeap = NULL;
6009        return rc;
6010    }
6011
6012    //Map memory for parameters buffer
6013    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6014            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6015            mParamHeap->getFd(0),
6016            sizeof(metadata_buffer_t));
6017    if(rc < 0) {
6018        LOGE("failed to map SETPARM buffer");
6019        rc = FAILED_TRANSACTION;
6020        mParamHeap->deallocate();
6021        delete mParamHeap;
6022        mParamHeap = NULL;
6023        return rc;
6024    }
6025
6026    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6027
6028    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6029    return rc;
6030}
6031
6032/*===========================================================================
6033 * FUNCTION   : deinitParameters
6034 *
6035 * DESCRIPTION: de-initialize camera parameters
6036 *
6037 * PARAMETERS :
6038 *
6039 * RETURN     : NONE
6040 *==========================================================================*/
6041void QCamera3HardwareInterface::deinitParameters()
6042{
6043    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6044            CAM_MAPPING_BUF_TYPE_PARM_BUF);
6045
6046    mParamHeap->deallocate();
6047    delete mParamHeap;
6048    mParamHeap = NULL;
6049
6050    mParameters = NULL;
6051
6052    free(mPrevParameters);
6053    mPrevParameters = NULL;
6054}
6055
6056/*===========================================================================
6057 * FUNCTION   : calcMaxJpegSize
6058 *
6059 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6060 *
6061 * PARAMETERS :
6062 *
6063 * RETURN     : max_jpeg_size
6064 *==========================================================================*/
6065size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6066{
6067    size_t max_jpeg_size = 0;
6068    size_t temp_width, temp_height;
6069    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6070            MAX_SIZES_CNT);
6071    for (size_t i = 0; i < count; i++) {
6072        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6073        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6074        if (temp_width * temp_height > max_jpeg_size ) {
6075            max_jpeg_size = temp_width * temp_height;
6076        }
6077    }
6078    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6079    return max_jpeg_size;
6080}
6081
6082/*===========================================================================
6083 * FUNCTION   : getMaxRawSize
6084 *
6085 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6086 *
6087 * PARAMETERS :
6088 *
6089 * RETURN     : Largest supported Raw Dimension
6090 *==========================================================================*/
6091cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6092{
6093    int max_width = 0;
6094    cam_dimension_t maxRawSize;
6095
6096    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6097    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6098        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6099            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6100            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6101        }
6102    }
6103    return maxRawSize;
6104}
6105
6106
6107/*===========================================================================
6108 * FUNCTION   : calcMaxJpegDim
6109 *
6110 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6111 *
6112 * PARAMETERS :
6113 *
6114 * RETURN     : max_jpeg_dim
6115 *==========================================================================*/
6116cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6117{
6118    cam_dimension_t max_jpeg_dim;
6119    cam_dimension_t curr_jpeg_dim;
6120    max_jpeg_dim.width = 0;
6121    max_jpeg_dim.height = 0;
6122    curr_jpeg_dim.width = 0;
6123    curr_jpeg_dim.height = 0;
6124    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6125        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6126        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6127        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6128            max_jpeg_dim.width * max_jpeg_dim.height ) {
6129            max_jpeg_dim.width = curr_jpeg_dim.width;
6130            max_jpeg_dim.height = curr_jpeg_dim.height;
6131        }
6132    }
6133    return max_jpeg_dim;
6134}
6135
6136/*===========================================================================
6137 * FUNCTION   : addStreamConfig
6138 *
6139 * DESCRIPTION: adds the stream configuration to the array
6140 *
6141 * PARAMETERS :
6142 * @available_stream_configs : pointer to stream configuration array
6143 * @scalar_format            : scalar format
6144 * @dim                      : configuration dimension
6145 * @config_type              : input or output configuration type
6146 *
6147 * RETURN     : NONE
6148 *==========================================================================*/
6149void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6150        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6151{
6152    available_stream_configs.add(scalar_format);
6153    available_stream_configs.add(dim.width);
6154    available_stream_configs.add(dim.height);
6155    available_stream_configs.add(config_type);
6156}
6157
6158/*===========================================================================
 * FUNCTION   : supportBurstCapture
6160 *
6161 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6162 *
6163 * PARAMETERS :
6164 *   @cameraId  : camera Id
6165 *
6166 * RETURN     : true if camera supports BURST_CAPTURE
6167 *              false otherwise
6168 *==========================================================================*/
6169bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6170{
6171    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6172    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6173    const int32_t highResWidth = 3264;
6174    const int32_t highResHeight = 2448;
6175
6176    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6177        // Maximum resolution images cannot be captured at >= 10fps
6178        // -> not supporting BURST_CAPTURE
6179        return false;
6180    }
6181
6182    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6183        // Maximum resolution images can be captured at >= 20fps
6184        // --> supporting BURST_CAPTURE
6185        return true;
6186    }
6187
6188    // Find the smallest highRes resolution, or largest resolution if there is none
6189    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6190            MAX_SIZES_CNT);
6191    size_t highRes = 0;
6192    while ((highRes + 1 < totalCnt) &&
6193            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6194            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6195            highResWidth * highResHeight)) {
6196        highRes++;
6197    }
6198    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6199        return true;
6200    } else {
6201        return false;
6202    }
6203}
6204
6205/*===========================================================================
6206 * FUNCTION   : initStaticMetadata
6207 *
6208 * DESCRIPTION: initialize the static metadata
6209 *
6210 * PARAMETERS :
6211 *   @cameraId  : camera Id
6212 *
6213 * RETURN     : int32_t type of status
6214 *              0  -- success
6215 *              non-zero failure code
6216 *==========================================================================*/
6217int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6218{
6219    int rc = 0;
6220    CameraMetadata staticInfo;
6221    size_t count = 0;
6222    bool limitedDevice = false;
6223    char prop[PROPERTY_VALUE_MAX];
6224    bool supportBurst = false;
6225
6226    supportBurst = supportBurstCapture(cameraId);
6227
6228    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6229     * guaranteed or if min fps of max resolution is less than 20 fps, its
6230     * advertised as limited device*/
6231    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6232            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6233            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6234            !supportBurst;
6235
6236    uint8_t supportedHwLvl = limitedDevice ?
6237            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6238            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
6239
6240    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6241            &supportedHwLvl, 1);
6242
6243    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6244    /*HAL 3 only*/
6245    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6246                    &gCamCapability[cameraId]->min_focus_distance, 1);
6247
6248    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6249                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6250
6251    /*should be using focal lengths but sensor doesn't provide that info now*/
6252    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6253                      &gCamCapability[cameraId]->focal_length,
6254                      1);
6255
6256    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6257            gCamCapability[cameraId]->apertures,
6258            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6259
6260    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6261            gCamCapability[cameraId]->filter_densities,
6262            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6263
6264
6265    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6266            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6267            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6268
6269    int32_t lens_shading_map_size[] = {
6270            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6271            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6272    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6273                      lens_shading_map_size,
6274                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6275
6276    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6277            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6278
6279    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6280            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6281
6282    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6283            &gCamCapability[cameraId]->max_frame_duration, 1);
6284
6285    camera_metadata_rational baseGainFactor = {
6286            gCamCapability[cameraId]->base_gain_factor.numerator,
6287            gCamCapability[cameraId]->base_gain_factor.denominator};
6288    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6289                      &baseGainFactor, 1);
6290
6291    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6292                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6293
6294    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6295            gCamCapability[cameraId]->pixel_array_size.height};
6296    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6297                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6298
6299    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6300            gCamCapability[cameraId]->active_array_size.top,
6301            gCamCapability[cameraId]->active_array_size.width,
6302            gCamCapability[cameraId]->active_array_size.height};
6303    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6304            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6305
6306    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6307            &gCamCapability[cameraId]->white_level, 1);
6308
6309    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6310            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6311
6312    bool hasBlackRegions = false;
6313    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6314        LOGW("black_region_count: %d is bounded to %d",
6315            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6316        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6317    }
6318    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6319        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6320        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6321            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6322        }
6323        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6324                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6325        hasBlackRegions = true;
6326    }
6327
6328    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6329            &gCamCapability[cameraId]->flash_charge_duration, 1);
6330
6331    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6332            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6333
6334    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
6335    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6336            &timestampSource, 1);
6337
6338    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6339            &gCamCapability[cameraId]->histogram_size, 1);
6340
6341    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6342            &gCamCapability[cameraId]->max_histogram_count, 1);
6343
6344    int32_t sharpness_map_size[] = {
6345            gCamCapability[cameraId]->sharpness_map_size.width,
6346            gCamCapability[cameraId]->sharpness_map_size.height};
6347
6348    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6349            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6350
6351    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6352            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6353
6354    int32_t scalar_formats[] = {
6355            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6356            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6357            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6358            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6359            HAL_PIXEL_FORMAT_RAW10,
6360            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6361    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6362    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6363                      scalar_formats,
6364                      scalar_formats_count);
6365
6366    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6367    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6368    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6369            count, MAX_SIZES_CNT, available_processed_sizes);
6370    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6371            available_processed_sizes, count * 2);
6372
6373    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6374    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6375    makeTable(gCamCapability[cameraId]->raw_dim,
6376            count, MAX_SIZES_CNT, available_raw_sizes);
6377    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6378            available_raw_sizes, count * 2);
6379
6380    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6381    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6382    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6383            count, MAX_SIZES_CNT, available_fps_ranges);
6384    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6385            available_fps_ranges, count * 2);
6386
6387    camera_metadata_rational exposureCompensationStep = {
6388            gCamCapability[cameraId]->exp_compensation_step.numerator,
6389            gCamCapability[cameraId]->exp_compensation_step.denominator};
6390    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6391                      &exposureCompensationStep, 1);
6392
6393    Vector<uint8_t> availableVstabModes;
6394    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6395    char eis_prop[PROPERTY_VALUE_MAX];
6396    memset(eis_prop, 0, sizeof(eis_prop));
6397    property_get("persist.camera.eis.enable", eis_prop, "0");
6398    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6399    if (facingBack && eis_prop_set) {
6400        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6401    }
6402    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6403                      availableVstabModes.array(), availableVstabModes.size());
6404
6405    /*HAL 1 and HAL 3 common*/
6406    float maxZoom = 4;
6407    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6408            &maxZoom, 1);
6409
6410    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6411    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6412
6413    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6414    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6415        max3aRegions[2] = 0; /* AF not supported */
6416    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6417            max3aRegions, 3);
6418
6419    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6420    memset(prop, 0, sizeof(prop));
6421    property_get("persist.camera.facedetect", prop, "1");
6422    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6423    LOGD("Support face detection mode: %d",
6424             supportedFaceDetectMode);
6425
6426    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6427    Vector<uint8_t> availableFaceDetectModes;
6428    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6429    if (supportedFaceDetectMode == 1) {
6430        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6431    } else if (supportedFaceDetectMode == 2) {
6432        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6433    } else if (supportedFaceDetectMode == 3) {
6434        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6435        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6436    } else {
6437        maxFaces = 0;
6438    }
6439    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6440            availableFaceDetectModes.array(),
6441            availableFaceDetectModes.size());
6442    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6443            (int32_t *)&maxFaces, 1);
6444
6445    int32_t exposureCompensationRange[] = {
6446            gCamCapability[cameraId]->exposure_compensation_min,
6447            gCamCapability[cameraId]->exposure_compensation_max};
6448    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6449            exposureCompensationRange,
6450            sizeof(exposureCompensationRange)/sizeof(int32_t));
6451
6452    uint8_t lensFacing = (facingBack) ?
6453            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6454    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6455
6456    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6457                      available_thumbnail_sizes,
6458                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6459
    /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    /* Entries are flattened (format, width, height, direction) tuples. */
    Vector<int32_t> available_stream_configs;
    cam_dimension_t active_array_dim;
    /* NOTE(review): active_array_dim is filled in but never read in this
     * section; verify it is used further down or whether it is dead. */
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        /* RAW formats are offered only at the sensor's native raw dims. */
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        /* JPEG (BLOB) streams are offered at every supported picture size. */
        case HAL_PIXEL_FORMAT_BLOB:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* Book keep largest */
                /* A size replaces the current largest only when it dominates
                 * in BOTH dimensions; the winner is reused as the input
                 * (reprocess) configuration below. */
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                 addStreamConfig(available_stream_configs, scalar_formats[j],
                         largest_picture_size,
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6523
    /* android.scaler.availableMinFrameDurations */
    /* Entries are flattened (format, width, height, min duration) tuples. */
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        /* RAW formats read the raw dimension/duration tables. */
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        /* All other formats read the processed picture tables. */
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());
6552
    /* Build HIGH_SPEED_VIDEO configurations from the backend HFR table. */
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        /* Map the backend HFR mode enum to its nominal frame rate; modes
         * not listed below leave fps at 0 and are filtered out. */
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
             * [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            size_t len = sizeof(default_hfr_video_sizes) / sizeof(default_hfr_video_sizes[0]);
            for (size_t j = 0; j < len; j++) {
                /* Only default sizes that fit inside the table entry's dims. */
                if ((default_hfr_video_sizes[j].width <= gCamCapability[cameraId]->hfr_tbl[i].dim.width) &&
                    (default_hfr_video_sizes[j].height <= gCamCapability[cameraId]->hfr_tbl[i].dim.height)) {
                    //TODO: Might need additional filtering based on VFE/CPP/CPU capabilities

                    /* (width, height, fps_min, fps_max, batch_size_max) */
                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
                    available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                    available_hfr_configs.add(fps);
                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                    /* (width, height, fps_min, fps_max, batch_size_max) */
                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
                    available_hfr_configs.add(fps);
                    available_hfr_configs.add(fps);
                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
                }// if
            }// for (...; j < len;...)
       } //if (fps >= MIN_FPS_FOR_BATCH_MODE)
    }
    //Advertise HFR capability only if the property is set
    /* 'prop' is a scratch buffer declared earlier in this function.
     * Default "1": HFR is advertised unless explicitly disabled. */
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }

    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);
6633
    /* Translate backend effect enums to framework values; entries with no
     * framework mapping are silently dropped. 'size' is reused by the
     * following sections as a generic output counter. */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);
6649
6650    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6651    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6652    size_t supported_scene_modes_cnt = 0;
6653    count = CAM_SCENE_MODE_MAX;
6654    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6655    for (size_t i = 0; i < count; i++) {
6656        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6657                CAM_SCENE_MODE_OFF) {
6658            int val = lookupFwkName(SCENE_MODES_MAP,
6659                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
6660                    gCamCapability[cameraId]->supported_scene_modes[i]);
6661            if (NAME_NOT_FOUND != val) {
6662                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6663                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6664                supported_scene_modes_cnt++;
6665            }
6666        }
6667    }
6668    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6669                      avail_scene_modes,
6670                      supported_scene_modes_cnt);
6671
6672    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6673    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6674                      supported_scene_modes_cnt,
6675                      CAM_SCENE_MODE_MAX,
6676                      scene_mode_overrides,
6677                      supported_indexes,
6678                      cameraId);
6679
6680    if (supported_scene_modes_cnt == 0) {
6681        supported_scene_modes_cnt = 1;
6682        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6683    }
6684
6685    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6686            scene_mode_overrides, supported_scene_modes_cnt * 3);
6687
6688    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6689                                         ANDROID_CONTROL_MODE_AUTO,
6690                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6691    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6692            available_control_modes,
6693            3);
6694
    /* AE antibanding: backend enums mapped to framework values. */
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    /* Chromatic aberration correction: either OFF only, or all three. */
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        //  If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    /* AF modes: backend enums mapped to framework values. */
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    /* AWB modes: backend enums mapped to framework values. */
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
6763
    /* Flash firing power levels, copied straight from the capability table. */
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    /* AE modes from capability, plus the two flash AE modes when a flash
     * unit exists (relies on AVAILABLE_FALSE being the zero value). */
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());
6796
    /* Sensor sensitivity (ISO) range. */
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    /* Max simultaneous output streams: (stalling, processed, raw). */
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    /* Count 0 deliberately publishes an empty LED list. */
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    /* Tag is only published when the backend value has a framework mapping.
     * 'val' is function-scoped and reused below for illuminant lookups. */
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    /* Sensor test pattern modes with a framework mapping. */
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    /* Pipeline depth = in-flight requests plus startup/skip delays. */
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6862
    /* Advertised device capabilities. Base set is unconditional; the
     * conditional ones mirror decisions made earlier in this function. */
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    /* Same condition under which HIGH_SPEED_VIDEO_CONFIGURATIONS was
     * published above -- the two must stay in sync. */
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    /* RAW capability is advertised only for non-YUV sensors. */
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);
6900
    /* Reprocessing: one input stream at a time. */
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    /* Post-RAW sensitivity boost (ISP digital gain) range. */
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));

    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    /* NOTE(review): the two updates below pass hard-coded counts (3 and 2)
     * where neighboring sections use the sizeof idiom -- they match the
     * array lengths today, but keep them in sync if the lists change. */
    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6975
    /* Reference illuminants: published only when the backend value maps to
     * a framework enum ('val' was declared earlier in this function). */
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    /* Color transform matrices are stored as rational arrays in the
     * capability struct and reinterpreted for the metadata API. */
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7013
    /* Keys accepted in capture requests by this HAL. */
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    /* AF regions are advertised only when the camera has more than one
     * focus mode (i.e. it can actually focus). */
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
7055
7056    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7057       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7058       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7059       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7060       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7061       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7062       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7063       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7064       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7065       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7066       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7067       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7068       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7069       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7070       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7071       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7072       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7073       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7074       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7075       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7076       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7077       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7078       ANDROID_STATISTICS_FACE_SCORES,
7079       ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
7080       ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7081       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
7082    size_t result_keys_cnt =
7083            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7084
7085    Vector<int32_t> available_result_keys;
7086    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7087    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7088        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7089    }
7090    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7091        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7092        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7093    }
7094    if (supportedFaceDetectMode == 1) {
7095        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7096        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7097    } else if ((supportedFaceDetectMode == 2) ||
7098            (supportedFaceDetectMode == 3)) {
7099        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7100        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7101    }
7102    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7103            available_result_keys.array(), available_result_keys.size());
7104
7105    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7106       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7107       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7108       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7109       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7110       ANDROID_SCALER_CROPPING_TYPE,
7111       ANDROID_SYNC_MAX_LATENCY,
7112       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7113       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7114       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7115       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7116       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7117       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7118       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7119       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7120       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7121       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7122       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7123       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7124       ANDROID_LENS_FACING,
7125       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7126       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7127       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7128       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7129       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7130       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7131       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7132       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7133       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7134       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7135       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7136       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7137       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7138       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7139       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7140       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7141       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7142       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7143       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7144       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7145       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7146       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7147       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7148       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7149       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7150       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7151       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7152       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7153       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7154       ANDROID_CONTROL_AVAILABLE_MODES,
7155       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7156       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7157       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7158       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7159       ANDROID_SHADING_AVAILABLE_MODES,
7160       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7161       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7162
7163    Vector<int32_t> available_characteristics_keys;
7164    available_characteristics_keys.appendArray(characteristics_keys_basic,
7165            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7166    if (hasBlackRegions) {
7167        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7168    }
7169    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7170                      available_characteristics_keys.array(),
7171                      available_characteristics_keys.size());
7172
7173    /*available stall durations depend on the hw + sw and will be different for different devices */
7174    /*have to add for raw after implementation*/
7175    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7176    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7177
7178    Vector<int64_t> available_stall_durations;
7179    for (uint32_t j = 0; j < stall_formats_count; j++) {
7180        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7181            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7182                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7183                available_stall_durations.add(stall_formats[j]);
7184                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7185                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7186                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7187          }
7188        } else {
7189            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7190                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7191                available_stall_durations.add(stall_formats[j]);
7192                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7193                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7194                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7195            }
7196        }
7197    }
7198    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7199                      available_stall_durations.array(),
7200                      available_stall_durations.size());
7201
7202    //QCAMERA3_OPAQUE_RAW
7203    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7204    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7205    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7206    case LEGACY_RAW:
7207        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7208            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7209        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7210            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7211        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7212            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7213        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7214        break;
7215    case MIPI_RAW:
7216        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7217            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7218        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7219            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7220        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7221            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7222        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7223        break;
7224    default:
7225        LOGE("unknown opaque_raw_format %d",
7226                gCamCapability[cameraId]->opaque_raw_fmt);
7227        break;
7228    }
7229    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7230
7231    Vector<int32_t> strides;
7232    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7233            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7234        cam_stream_buf_plane_info_t buf_planes;
7235        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7236        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7237        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7238            &gCamCapability[cameraId]->padding_info, &buf_planes);
7239        strides.add(buf_planes.plane_info.mp[0].stride);
7240    }
7241    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7242            strides.size());
7243
7244    Vector<int32_t> opaque_size;
7245    for (size_t j = 0; j < scalar_formats_count; j++) {
7246        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7247            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7248                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7249                cam_stream_buf_plane_info_t buf_planes;
7250
7251                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7252                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7253
7254                if (rc == 0) {
7255                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7256                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7257                    opaque_size.add(buf_planes.plane_info.frame_len);
7258                }else {
7259                    LOGE("raw frame calculation failed!");
7260                }
7261            }
7262        }
7263    }
7264
7265    if ((opaque_size.size() > 0) &&
7266            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7267        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7268    else
7269        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7270
7271    gStaticMetadata[cameraId] = staticInfo.release();
7272    return rc;
7273}
7274
7275/*===========================================================================
7276 * FUNCTION   : makeTable
7277 *
7278 * DESCRIPTION: make a table of sizes
7279 *
7280 * PARAMETERS :
7281 *
7282 *
7283 *==========================================================================*/
7284void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7285        size_t max_size, int32_t *sizeTable)
7286{
7287    size_t j = 0;
7288    if (size > max_size) {
7289       size = max_size;
7290    }
7291    for (size_t i = 0; i < size; i++) {
7292        sizeTable[j] = dimTable[i].width;
7293        sizeTable[j+1] = dimTable[i].height;
7294        j+=2;
7295    }
7296}
7297
7298/*===========================================================================
7299 * FUNCTION   : makeFPSTable
7300 *
7301 * DESCRIPTION: make a table of fps ranges
7302 *
7303 * PARAMETERS :
7304 *
7305 *==========================================================================*/
7306void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7307        size_t max_size, int32_t *fpsRangesTable)
7308{
7309    size_t j = 0;
7310    if (size > max_size) {
7311       size = max_size;
7312    }
7313    for (size_t i = 0; i < size; i++) {
7314        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7315        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7316        j+=2;
7317    }
7318}
7319
7320/*===========================================================================
7321 * FUNCTION   : makeOverridesList
7322 *
7323 * DESCRIPTION: make a list of scene mode overrides
7324 *
7325 * PARAMETERS :
7326 *
7327 *
7328 *==========================================================================*/
7329void QCamera3HardwareInterface::makeOverridesList(
7330        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
7331        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
7332{
7333    /*daemon will give a list of overrides for all scene modes.
7334      However we should send the fwk only the overrides for the scene modes
7335      supported by the framework*/
7336    size_t j = 0;
7337    if (size > max_size) {
7338       size = max_size;
7339    }
7340    size_t focus_count = CAM_FOCUS_MODE_MAX;
7341    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
7342            focus_count);
7343    for (size_t i = 0; i < size; i++) {
7344        bool supt = false;
7345        size_t index = supported_indexes[i];
7346        overridesList[j] = gCamCapability[camera_id]->flash_available ?
7347                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
7348        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7349                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7350                overridesTable[index].awb_mode);
7351        if (NAME_NOT_FOUND != val) {
7352            overridesList[j+1] = (uint8_t)val;
7353        }
7354        uint8_t focus_override = overridesTable[index].af_mode;
7355        for (size_t k = 0; k < focus_count; k++) {
7356           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
7357              supt = true;
7358              break;
7359           }
7360        }
7361        if (supt) {
7362            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7363                    focus_override);
7364            if (NAME_NOT_FOUND != val) {
7365                overridesList[j+2] = (uint8_t)val;
7366            }
7367        } else {
7368           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
7369        }
7370        j+=3;
7371    }
7372}
7373
7374/*===========================================================================
7375 * FUNCTION   : filterJpegSizes
7376 *
7377 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7378 *              could be downscaled to
7379 *
7380 * PARAMETERS :
7381 *
7382 * RETURN     : length of jpegSizes array
7383 *==========================================================================*/
7384
7385size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7386        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7387        uint8_t downscale_factor)
7388{
7389    if (0 == downscale_factor) {
7390        downscale_factor = 1;
7391    }
7392
7393    int32_t min_width = active_array_size.width / downscale_factor;
7394    int32_t min_height = active_array_size.height / downscale_factor;
7395    size_t jpegSizesCnt = 0;
7396    if (processedSizesCnt > maxCount) {
7397        processedSizesCnt = maxCount;
7398    }
7399    for (size_t i = 0; i < processedSizesCnt; i+=2) {
7400        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7401            jpegSizes[jpegSizesCnt] = processedSizes[i];
7402            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7403            jpegSizesCnt += 2;
7404        }
7405    }
7406    return jpegSizesCnt;
7407}
7408
7409/*===========================================================================
7410 * FUNCTION   : computeNoiseModelEntryS
7411 *
7412 * DESCRIPTION: function to map a given sensitivity to the S noise
7413 *              model parameters in the DNG noise model.
7414 *
7415 * PARAMETERS : sens : the sensor sensitivity
7416 *
 * RETURN     : S (sensor amplification) noise
7418 *
7419 *==========================================================================*/
7420double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7421    double s = gCamCapability[mCameraId]->gradient_S * sens +
7422            gCamCapability[mCameraId]->offset_S;
7423    return ((s < 0.0) ? 0.0 : s);
7424}
7425
7426/*===========================================================================
7427 * FUNCTION   : computeNoiseModelEntryO
7428 *
7429 * DESCRIPTION: function to map a given sensitivity to the O noise
7430 *              model parameters in the DNG noise model.
7431 *
7432 * PARAMETERS : sens : the sensor sensitivity
7433 *
 * RETURN     : O (sensor readout) noise
7435 *
7436 *==========================================================================*/
7437double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7438    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7439    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7440            1.0 : (1.0 * sens / max_analog_sens);
7441    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7442            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7443    return ((o < 0.0) ? 0.0 : o);
7444}
7445
7446/*===========================================================================
7447 * FUNCTION   : getSensorSensitivity
7448 *
7449 * DESCRIPTION: convert iso_mode to an integer value
7450 *
7451 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7452 *
 * RETURN     : sensitivity supported by sensor
7454 *
7455 *==========================================================================*/
7456int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7457{
7458    int32_t sensitivity;
7459
7460    switch (iso_mode) {
7461    case CAM_ISO_MODE_100:
7462        sensitivity = 100;
7463        break;
7464    case CAM_ISO_MODE_200:
7465        sensitivity = 200;
7466        break;
7467    case CAM_ISO_MODE_400:
7468        sensitivity = 400;
7469        break;
7470    case CAM_ISO_MODE_800:
7471        sensitivity = 800;
7472        break;
7473    case CAM_ISO_MODE_1600:
7474        sensitivity = 1600;
7475        break;
7476    default:
7477        sensitivity = -1;
7478        break;
7479    }
7480    return sensitivity;
7481}
7482
7483/*===========================================================================
7484 * FUNCTION   : getCamInfo
7485 *
7486 * DESCRIPTION: query camera capabilities
7487 *
7488 * PARAMETERS :
7489 *   @cameraId  : camera Id
7490 *   @info      : camera info struct to be filled in with camera capabilities
7491 *
7492 * RETURN     : int type of status
7493 *              NO_ERROR  -- success
7494 *              none-zero failure code
7495 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // All per-camera global caches are guarded by gCamLock; every exit
    // path below must release it.
    pthread_mutex_lock(&gCamLock);
    // Lazily query and cache the camera capabilities on first use.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily build and cache the static metadata on first use.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the sensor position into the framework facing constant.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: report failure but still fill in the rest of
        // the info structure below.
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    // Find the highest supported frame rate across all advertised ranges.
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput: all processed streams at full
    // active-array resolution at max_fps, relative to CPP bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
7563
7564/*===========================================================================
7565 * FUNCTION   : translateCapabilityToMetadata
7566 *
7567 * DESCRIPTION: translate the capability into camera_metadata_t
7568 *
7569 * PARAMETERS : type of the request
7570 *
7571 *
7572 * RETURN     : success: camera_metadata_t*
7573 *              failure: NULL
7574 *
7575 *==========================================================================*/
7576camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7577{
7578    if (mDefaultMetadata[type] != NULL) {
7579        return mDefaultMetadata[type];
7580    }
7581    //first time we are handling this request
7582    //fill up the metadata structure using the wrapper class
7583    CameraMetadata settings;
7584    //translate from cam_capability_t to camera_metadata_tag_t
7585    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7586    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7587    int32_t defaultRequestID = 0;
7588    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7589
7590    /* OIS disable */
7591    char ois_prop[PROPERTY_VALUE_MAX];
7592    memset(ois_prop, 0, sizeof(ois_prop));
7593    property_get("persist.camera.ois.disable", ois_prop, "0");
7594    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7595
7596    /* Force video to use OIS */
7597    char videoOisProp[PROPERTY_VALUE_MAX];
7598    memset(videoOisProp, 0, sizeof(videoOisProp));
7599    property_get("persist.camera.ois.video", videoOisProp, "1");
7600    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7601
7602    // EIS enable/disable
7603    char eis_prop[PROPERTY_VALUE_MAX];
7604    memset(eis_prop, 0, sizeof(eis_prop));
7605    property_get("persist.camera.eis.enable", eis_prop, "0");
7606    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7607
7608    // Hybrid AE enable/disable
7609    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
7610    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
7611    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
7612    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
7613
7614    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7615    // This is a bit hacky. EIS is enabled only when the above setprop
7616    // is set to non-zero value and on back camera (for 2015 Nexus).
7617    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7618    // configureStream is called before this function. In other words,
7619    // we cannot guarantee the app will call configureStream before
7620    // calling createDefaultRequest.
7621    const bool eisEnabled = facingBack && eis_prop_set;
7622
7623    uint8_t controlIntent = 0;
7624    uint8_t focusMode;
7625    uint8_t vsMode;
7626    uint8_t optStabMode;
7627    uint8_t cacMode;
7628    uint8_t edge_mode;
7629    uint8_t noise_red_mode;
7630    uint8_t tonemap_mode;
7631    bool highQualityModeEntryAvailable = FALSE;
7632    bool fastModeEntryAvailable = FALSE;
7633    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7634    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7635    switch (type) {
7636      case CAMERA3_TEMPLATE_PREVIEW:
7637        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7638        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7639        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7640        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7641        edge_mode = ANDROID_EDGE_MODE_FAST;
7642        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7643        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7644        break;
7645      case CAMERA3_TEMPLATE_STILL_CAPTURE:
7646        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7647        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7648        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7649        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7650        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7651        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7652        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7653        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
7654        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
7655            if (gCamCapability[mCameraId]->aberration_modes[i] ==
7656                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
7657                highQualityModeEntryAvailable = TRUE;
7658            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
7659                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
7660                fastModeEntryAvailable = TRUE;
7661            }
7662        }
7663        if (highQualityModeEntryAvailable) {
7664            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7665        } else if (fastModeEntryAvailable) {
7666            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7667        }
7668        break;
7669      case CAMERA3_TEMPLATE_VIDEO_RECORD:
7670        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7671        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7672        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7673        if (eisEnabled) {
7674            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7675        }
7676        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7677        edge_mode = ANDROID_EDGE_MODE_FAST;
7678        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7679        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7680        if (forceVideoOis)
7681            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7682        break;
7683      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7684        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7685        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7686        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7687        if (eisEnabled) {
7688            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7689        }
7690        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7691        edge_mode = ANDROID_EDGE_MODE_FAST;
7692        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7693        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7694        if (forceVideoOis)
7695            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7696        break;
7697      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7698        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7699        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7700        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7701        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7702        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7703        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7704        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7705        break;
7706      case CAMERA3_TEMPLATE_MANUAL:
7707        edge_mode = ANDROID_EDGE_MODE_FAST;
7708        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7709        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7710        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7711        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7712        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7713        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7714        break;
7715      default:
7716        edge_mode = ANDROID_EDGE_MODE_FAST;
7717        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7718        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7719        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7720        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7721        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7722        break;
7723    }
7724    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7725    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7726    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7727    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7728        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7729    }
7730    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7731
7732    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7733            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7734        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7735    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7736            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7737            || ois_disable)
7738        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7739    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7740
7741    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7742            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7743
7744    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7745    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7746
7747    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7748    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7749
7750    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7751    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7752
7753    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7754    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7755
7756    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7757    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7758
7759    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7760    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7761
7762    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7763    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7764
7765    /*flash*/
7766    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7767    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7768
7769    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7770    settings.update(ANDROID_FLASH_FIRING_POWER,
7771            &flashFiringLevel, 1);
7772
7773    /* lens */
7774    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7775    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7776
7777    if (gCamCapability[mCameraId]->filter_densities_count) {
7778        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7779        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7780                        gCamCapability[mCameraId]->filter_densities_count);
7781    }
7782
7783    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7784    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7785
7786    float default_focus_distance = 0;
7787    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7788
7789    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7790    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7791
7792    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7793    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7794
7795    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7796    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7797
7798    /* face detection (default to OFF) */
7799    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7800    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7801
7802    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7803    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7804
7805    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7806    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7807
7808    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7809    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7810
7811    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7812    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7813
7814    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7815    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7816
7817    /* Exposure time(Update the Min Exposure Time)*/
7818    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7819    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7820
7821    /* frame duration */
7822    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7823    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7824
7825    /* sensitivity */
7826    static const int32_t default_sensitivity = 100;
7827    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7828
7829    /*edge mode*/
7830    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7831
7832    /*noise reduction mode*/
7833    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7834
7835    /*color correction mode*/
7836    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7837    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7838
7839    /*transform matrix mode*/
7840    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7841
7842    int32_t scaler_crop_region[4];
7843    scaler_crop_region[0] = 0;
7844    scaler_crop_region[1] = 0;
7845    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7846    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7847    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7848
7849    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7850    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7851
7852    /*focus distance*/
7853    float focus_distance = 0.0;
7854    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7855
7856    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7857    float max_range = 0.0;
7858    float max_fixed_fps = 0.0;
7859    int32_t fps_range[2] = {0, 0};
7860    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7861            i++) {
7862        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7863            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7864        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7865                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7866                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7867            if (range > max_range) {
7868                fps_range[0] =
7869                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7870                fps_range[1] =
7871                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7872                max_range = range;
7873            }
7874        } else {
7875            if (range < 0.01 && max_fixed_fps <
7876                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7877                fps_range[0] =
7878                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7879                fps_range[1] =
7880                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7881                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7882            }
7883        }
7884    }
7885    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7886
7887    /*precapture trigger*/
7888    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7889    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7890
7891    /*af trigger*/
7892    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7893    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7894
7895    /* ae & af regions */
7896    int32_t active_region[] = {
7897            gCamCapability[mCameraId]->active_array_size.left,
7898            gCamCapability[mCameraId]->active_array_size.top,
7899            gCamCapability[mCameraId]->active_array_size.left +
7900                    gCamCapability[mCameraId]->active_array_size.width,
7901            gCamCapability[mCameraId]->active_array_size.top +
7902                    gCamCapability[mCameraId]->active_array_size.height,
7903            0};
7904    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7905            sizeof(active_region) / sizeof(active_region[0]));
7906    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7907            sizeof(active_region) / sizeof(active_region[0]));
7908
7909    /* black level lock */
7910    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7911    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7912
7913    /* lens shading map mode */
7914    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7915    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7916        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7917    }
7918    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7919
7920    //special defaults for manual template
7921    if (type == CAMERA3_TEMPLATE_MANUAL) {
7922        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7923        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7924
7925        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7926        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7927
7928        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7929        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7930
7931        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7932        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7933
7934        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7935        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7936
7937        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7938        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7939    }
7940
7941
7942    /* TNR
7943     * We'll use this location to determine which modes TNR will be set.
7944     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7945     * This is not to be confused with linking on a per stream basis that decision
7946     * is still on per-session basis and will be handled as part of config stream
7947     */
7948    uint8_t tnr_enable = 0;
7949
7950    if (m_bTnrPreview || m_bTnrVideo) {
7951
7952        switch (type) {
7953            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7954            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7955                    tnr_enable = 1;
7956                    break;
7957
7958            default:
7959                    tnr_enable = 0;
7960                    break;
7961        }
7962
7963        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7964        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7965        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7966
7967        LOGD("TNR:%d with process plate %d for template:%d",
7968                             tnr_enable, tnr_process_type, type);
7969    }
7970
7971    /* CDS default */
7972    char prop[PROPERTY_VALUE_MAX];
7973    memset(prop, 0, sizeof(prop));
7974    property_get("persist.camera.CDS", prop, "Auto");
7975    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7976    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7977    if (CAM_CDS_MODE_MAX == cds_mode) {
7978        cds_mode = CAM_CDS_MODE_AUTO;
7979    }
7980
7981    /* Disabling CDS in templates which have TNR enabled*/
7982    if (tnr_enable)
7983        cds_mode = CAM_CDS_MODE_OFF;
7984
7985    int32_t mode = cds_mode;
7986    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7987
7988    /* hybrid ae */
7989    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
7990
7991    mDefaultMetadata[type] = settings.release();
7992
7993    return mDefaultMetadata[type];
7994}
7995
7996/*===========================================================================
7997 * FUNCTION   : setFrameParameters
7998 *
7999 * DESCRIPTION: set parameters per frame as requested in the metadata from
8000 *              framework
8001 *
8002 * PARAMETERS :
8003 *   @request   : request that needs to be serviced
8004 *   @streamID : Stream ID of all the requested streams
8005 *   @blob_request: Whether this request is a blob request or not
8006 *
8007 * RETURN     : success: NO_ERROR
8008 *              failure:
8009 *==========================================================================*/
8010int QCamera3HardwareInterface::setFrameParameters(
8011                    camera3_capture_request_t *request,
8012                    cam_stream_ID_t streamID,
8013                    int blob_request,
8014                    uint32_t snapshotStreamId)
8015{
8016    /*translate from camera_metadata_t type to parm_type_t*/
8017    int rc = 0;
8018    int32_t hal_version = CAM_HAL_V3;
8019
8020    clear_metadata_buffer(mParameters);
8021    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8022        LOGE("Failed to set hal version in the parameters");
8023        return BAD_VALUE;
8024    }
8025
8026    /*we need to update the frame number in the parameters*/
8027    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8028            request->frame_number)) {
8029        LOGE("Failed to set the frame number in the parameters");
8030        return BAD_VALUE;
8031    }
8032
8033    /* Update stream id of all the requested buffers */
8034    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8035        LOGE("Failed to set stream type mask in the parameters");
8036        return BAD_VALUE;
8037    }
8038
8039    if (mUpdateDebugLevel) {
8040        uint32_t dummyDebugLevel = 0;
8041        /* The value of dummyDebugLevel is irrelavent. On
8042         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8043        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8044                dummyDebugLevel)) {
8045            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8046            return BAD_VALUE;
8047        }
8048        mUpdateDebugLevel = false;
8049    }
8050
8051    if(request->settings != NULL){
8052        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8053        if (blob_request)
8054            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8055    }
8056
8057    return rc;
8058}
8059
8060/*===========================================================================
8061 * FUNCTION   : setReprocParameters
8062 *
8063 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8064 *              return it.
8065 *
8066 * PARAMETERS :
8067 *   @request   : request that needs to be serviced
8068 *
8069 * RETURN     : success: NO_ERROR
8070 *              failure:
8071 *==========================================================================*/
8072int32_t QCamera3HardwareInterface::setReprocParameters(
8073        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8074        uint32_t snapshotStreamId)
8075{
8076    /*translate from camera_metadata_t type to parm_type_t*/
8077    int rc = 0;
8078
8079    if (NULL == request->settings){
8080        LOGE("Reprocess settings cannot be NULL");
8081        return BAD_VALUE;
8082    }
8083
8084    if (NULL == reprocParam) {
8085        LOGE("Invalid reprocessing metadata buffer");
8086        return BAD_VALUE;
8087    }
8088    clear_metadata_buffer(reprocParam);
8089
8090    /*we need to update the frame number in the parameters*/
8091    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8092            request->frame_number)) {
8093        LOGE("Failed to set the frame number in the parameters");
8094        return BAD_VALUE;
8095    }
8096
8097    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8098    if (rc < 0) {
8099        LOGE("Failed to translate reproc request");
8100        return rc;
8101    }
8102
8103    CameraMetadata frame_settings;
8104    frame_settings = request->settings;
8105    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8106            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8107        int32_t *crop_count =
8108                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8109        int32_t *crop_data =
8110                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8111        int32_t *roi_map =
8112                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8113        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8114            cam_crop_data_t crop_meta;
8115            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8116            crop_meta.num_of_streams = 1;
8117            crop_meta.crop_info[0].crop.left   = crop_data[0];
8118            crop_meta.crop_info[0].crop.top    = crop_data[1];
8119            crop_meta.crop_info[0].crop.width  = crop_data[2];
8120            crop_meta.crop_info[0].crop.height = crop_data[3];
8121
8122            crop_meta.crop_info[0].roi_map.left =
8123                    roi_map[0];
8124            crop_meta.crop_info[0].roi_map.top =
8125                    roi_map[1];
8126            crop_meta.crop_info[0].roi_map.width =
8127                    roi_map[2];
8128            crop_meta.crop_info[0].roi_map.height =
8129                    roi_map[3];
8130
8131            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8132                rc = BAD_VALUE;
8133            }
8134            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8135                    request->input_buffer->stream,
8136                    crop_meta.crop_info[0].crop.left,
8137                    crop_meta.crop_info[0].crop.top,
8138                    crop_meta.crop_info[0].crop.width,
8139                    crop_meta.crop_info[0].crop.height);
8140            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8141                    request->input_buffer->stream,
8142                    crop_meta.crop_info[0].roi_map.left,
8143                    crop_meta.crop_info[0].roi_map.top,
8144                    crop_meta.crop_info[0].roi_map.width,
8145                    crop_meta.crop_info[0].roi_map.height);
8146            } else {
8147                LOGE("Invalid reprocess crop count %d!", *crop_count);
8148            }
8149    } else {
8150        LOGE("No crop data from matching output stream");
8151    }
8152
8153    /* These settings are not needed for regular requests so handle them specially for
8154       reprocess requests; information needed for EXIF tags */
8155    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8156        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8157                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8158        if (NAME_NOT_FOUND != val) {
8159            uint32_t flashMode = (uint32_t)val;
8160            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8161                rc = BAD_VALUE;
8162            }
8163        } else {
8164            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8165                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8166        }
8167    } else {
8168        LOGH("No flash mode in reprocess settings");
8169    }
8170
8171    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8172        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8173        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8174            rc = BAD_VALUE;
8175        }
8176    } else {
8177        LOGH("No flash state in reprocess settings");
8178    }
8179
8180    return rc;
8181}
8182
8183/*===========================================================================
8184 * FUNCTION   : saveRequestSettings
8185 *
8186 * DESCRIPTION: Add any settings that might have changed to the request settings
8187 *              and save the settings to be applied on the frame
8188 *
8189 * PARAMETERS :
8190 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8191 *   @request      : request with initial settings
8192 *
8193 * RETURN     :
8194 * camera_metadata_t* : pointer to the saved request settings
8195 *==========================================================================*/
8196camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8197        const CameraMetadata &jpegMetadata,
8198        camera3_capture_request_t *request)
8199{
8200    camera_metadata_t *resultMetadata;
8201    CameraMetadata camMetadata;
8202    camMetadata = request->settings;
8203
8204    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8205        int32_t thumbnail_size[2];
8206        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8207        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8208        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8209                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8210    }
8211
8212    resultMetadata = camMetadata.release();
8213    return resultMetadata;
8214}
8215
8216/*===========================================================================
8217 * FUNCTION   : setHalFpsRange
8218 *
8219 * DESCRIPTION: set FPS range parameter
8220 *
8221 *
8222 * PARAMETERS :
8223 *   @settings    : Metadata from framework
8224 *   @hal_metadata: Metadata buffer
8225 *
8226 *
8227 * RETURN     : success: NO_ERROR
8228 *              failure:
8229 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists in
    // 'settings' -- find() on a missing tag returns an entry whose data
    // pointer is NULL; confirm callers check exists() before calling.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Start with video fps equal to the AE target range; the constrained
    // high-speed (HFR) branch below may pin both to the max fps.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Default: no frame batching. Only the HFR branch below enables it.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR session: pin both preview and video min fps to the video max
        // so the sensor runs at a single fixed rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto the backend's discrete HFR mode.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size is sensor fps over the preview fps target,
                // capped at the hardware maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range to the backend.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
8323
8324/*===========================================================================
8325 * FUNCTION   : translateToHalMetadata
8326 *
8327 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8328 *
8329 *
8330 * PARAMETERS :
8331 *   @request  : request sent from framework
8332 *
8333 *
8334 * RETURN     : success: NO_ERROR
8335 *              failure:
8336 *==========================================================================*/
8337int QCamera3HardwareInterface::translateToHalMetadata
8338                                  (const camera3_capture_request_t *request,
8339                                   metadata_buffer_t *hal_metadata,
8340                                   uint32_t snapshotStreamId)
8341{
8342    int rc = 0;
8343    CameraMetadata frame_settings;
8344    frame_settings = request->settings;
8345
8346    /* Do not change the order of the following list unless you know what you are
8347     * doing.
8348     * The order is laid out in such a way that parameters in the front of the table
8349     * may be used to override the parameters later in the table. Examples are:
8350     * 1. META_MODE should precede AEC/AWB/AF MODE
8351     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
8352     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
8353     * 4. Any mode should precede it's corresponding settings
8354     */
8355    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8356        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8357        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8358            rc = BAD_VALUE;
8359        }
8360        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8361        if (rc != NO_ERROR) {
8362            LOGE("extractSceneMode failed");
8363        }
8364    }
8365
8366    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8367        uint8_t fwk_aeMode =
8368            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8369        uint8_t aeMode;
8370        int32_t redeye;
8371
8372        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8373            aeMode = CAM_AE_MODE_OFF;
8374        } else {
8375            aeMode = CAM_AE_MODE_ON;
8376        }
8377        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8378            redeye = 1;
8379        } else {
8380            redeye = 0;
8381        }
8382
8383        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8384                fwk_aeMode);
8385        if (NAME_NOT_FOUND != val) {
8386            int32_t flashMode = (int32_t)val;
8387            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8388        }
8389
8390        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8391        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8392            rc = BAD_VALUE;
8393        }
8394    }
8395
8396    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8397        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8398        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8399                fwk_whiteLevel);
8400        if (NAME_NOT_FOUND != val) {
8401            uint8_t whiteLevel = (uint8_t)val;
8402            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8403                rc = BAD_VALUE;
8404            }
8405        }
8406    }
8407
8408    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8409        uint8_t fwk_cacMode =
8410                frame_settings.find(
8411                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8412        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8413                fwk_cacMode);
8414        if (NAME_NOT_FOUND != val) {
8415            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8416            bool entryAvailable = FALSE;
8417            // Check whether Frameworks set CAC mode is supported in device or not
8418            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8419                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
8420                    entryAvailable = TRUE;
8421                    break;
8422                }
8423            }
8424            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
8425            // If entry not found then set the device supported mode instead of frameworks mode i.e,
8426            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
8427            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
8428            if (entryAvailable == FALSE) {
8429                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8430                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8431                } else {
8432                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8433                        // High is not supported and so set the FAST as spec say's underlying
8434                        // device implementation can be the same for both modes.
8435                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
8436                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8437                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
8438                        // in order to avoid the fps drop due to high quality
8439                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8440                    } else {
8441                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8442                    }
8443                }
8444            }
8445            LOGD("Final cacMode is %d", cacMode);
8446            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8447                rc = BAD_VALUE;
8448            }
8449        } else {
8450            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
8451        }
8452    }
8453
8454    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8455        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8456        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8457                fwk_focusMode);
8458        if (NAME_NOT_FOUND != val) {
8459            uint8_t focusMode = (uint8_t)val;
8460            LOGD("set focus mode %d", focusMode);
8461            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8462                rc = BAD_VALUE;
8463            }
8464        }
8465    }
8466
8467    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8468        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8469        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8470                focalDistance)) {
8471            rc = BAD_VALUE;
8472        }
8473    }
8474
8475    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8476        uint8_t fwk_antibandingMode =
8477                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8478        int val = lookupHalName(ANTIBANDING_MODES_MAP,
8479                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8480        if (NAME_NOT_FOUND != val) {
8481            uint32_t hal_antibandingMode = (uint32_t)val;
8482            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8483                    hal_antibandingMode)) {
8484                rc = BAD_VALUE;
8485            }
8486        }
8487    }
8488
8489    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8490        int32_t expCompensation = frame_settings.find(
8491                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8492        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8493            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8494        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8495            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8496        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8497                expCompensation)) {
8498            rc = BAD_VALUE;
8499        }
8500    }
8501
8502    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8503        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8504        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8505            rc = BAD_VALUE;
8506        }
8507    }
8508    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8509        rc = setHalFpsRange(frame_settings, hal_metadata);
8510        if (rc != NO_ERROR) {
8511            LOGE("setHalFpsRange failed");
8512        }
8513    }
8514
8515    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8516        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8517        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8518            rc = BAD_VALUE;
8519        }
8520    }
8521
8522    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8523        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8524        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8525                fwk_effectMode);
8526        if (NAME_NOT_FOUND != val) {
8527            uint8_t effectMode = (uint8_t)val;
8528            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8529                rc = BAD_VALUE;
8530            }
8531        }
8532    }
8533
8534    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8535        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8536        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8537                colorCorrectMode)) {
8538            rc = BAD_VALUE;
8539        }
8540    }
8541
8542    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8543        cam_color_correct_gains_t colorCorrectGains;
8544        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
8545            colorCorrectGains.gains[i] =
8546                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8547        }
8548        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8549                colorCorrectGains)) {
8550            rc = BAD_VALUE;
8551        }
8552    }
8553
8554    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8555        cam_color_correct_matrix_t colorCorrectTransform;
8556        cam_rational_type_t transform_elem;
8557        size_t num = 0;
8558        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8559           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8560              transform_elem.numerator =
8561                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8562              transform_elem.denominator =
8563                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8564              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8565              num++;
8566           }
8567        }
8568        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8569                colorCorrectTransform)) {
8570            rc = BAD_VALUE;
8571        }
8572    }
8573
8574    cam_trigger_t aecTrigger;
8575    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8576    aecTrigger.trigger_id = -1;
8577    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8578        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8579        aecTrigger.trigger =
8580            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8581        aecTrigger.trigger_id =
8582            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8583        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8584                aecTrigger)) {
8585            rc = BAD_VALUE;
8586        }
8587        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
8588                aecTrigger.trigger, aecTrigger.trigger_id);
8589    }
8590
8591    /*af_trigger must come with a trigger id*/
8592    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8593        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8594        cam_trigger_t af_trigger;
8595        af_trigger.trigger =
8596            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8597        af_trigger.trigger_id =
8598            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8599        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8600            rc = BAD_VALUE;
8601        }
8602        LOGD("AfTrigger: %d AfTriggerID: %d",
8603                af_trigger.trigger, af_trigger.trigger_id);
8604    }
8605
8606    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8607        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8608        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8609            rc = BAD_VALUE;
8610        }
8611    }
8612    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8613        cam_edge_application_t edge_application;
8614        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8615        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8616            edge_application.sharpness = 0;
8617        } else {
8618            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8619        }
8620        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8621            rc = BAD_VALUE;
8622        }
8623    }
8624
8625    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8626        int32_t respectFlashMode = 1;
8627        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8628            uint8_t fwk_aeMode =
8629                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8630            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8631                respectFlashMode = 0;
8632                LOGH("AE Mode controls flash, ignore android.flash.mode");
8633            }
8634        }
8635        if (respectFlashMode) {
8636            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8637                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8638            LOGH("flash mode after mapping %d", val);
8639            // To check: CAM_INTF_META_FLASH_MODE usage
8640            if (NAME_NOT_FOUND != val) {
8641                uint8_t flashMode = (uint8_t)val;
8642                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8643                    rc = BAD_VALUE;
8644                }
8645            }
8646        }
8647    }
8648
8649    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8650        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8651        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8652            rc = BAD_VALUE;
8653        }
8654    }
8655
8656    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8657        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8658        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8659                flashFiringTime)) {
8660            rc = BAD_VALUE;
8661        }
8662    }
8663
8664    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8665        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8666        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8667                hotPixelMode)) {
8668            rc = BAD_VALUE;
8669        }
8670    }
8671
8672    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8673        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8674        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8675                lensAperture)) {
8676            rc = BAD_VALUE;
8677        }
8678    }
8679
8680    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8681        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8682        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8683                filterDensity)) {
8684            rc = BAD_VALUE;
8685        }
8686    }
8687
8688    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8689        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8690        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8691                focalLength)) {
8692            rc = BAD_VALUE;
8693        }
8694    }
8695
8696    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8697        uint8_t optStabMode =
8698                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8699        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8700                optStabMode)) {
8701            rc = BAD_VALUE;
8702        }
8703    }
8704
8705    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8706        uint8_t videoStabMode =
8707                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8708        LOGD("videoStabMode from APP = %d", videoStabMode);
8709        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8710                videoStabMode)) {
8711            rc = BAD_VALUE;
8712        }
8713    }
8714
8715
8716    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8717        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8718        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8719                noiseRedMode)) {
8720            rc = BAD_VALUE;
8721        }
8722    }
8723
8724    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8725        float reprocessEffectiveExposureFactor =
8726            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8727        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8728                reprocessEffectiveExposureFactor)) {
8729            rc = BAD_VALUE;
8730        }
8731    }
8732
8733    cam_crop_region_t scalerCropRegion;
8734    bool scalerCropSet = false;
8735    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8736        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8737        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8738        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8739        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8740
8741        // Map coordinate system from active array to sensor output.
8742        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8743                scalerCropRegion.width, scalerCropRegion.height);
8744
8745        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8746                scalerCropRegion)) {
8747            rc = BAD_VALUE;
8748        }
8749        scalerCropSet = true;
8750    }
8751
8752    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8753        int64_t sensorExpTime =
8754                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8755        LOGD("setting sensorExpTime %lld", sensorExpTime);
8756        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8757                sensorExpTime)) {
8758            rc = BAD_VALUE;
8759        }
8760    }
8761
8762    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8763        int64_t sensorFrameDuration =
8764                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8765        int64_t minFrameDuration = getMinFrameDuration(request);
8766        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8767        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8768            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8769        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
8770        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8771                sensorFrameDuration)) {
8772            rc = BAD_VALUE;
8773        }
8774    }
8775
8776    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8777        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8778        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8779                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8780        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8781                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8782        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
8783        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8784                sensorSensitivity)) {
8785            rc = BAD_VALUE;
8786        }
8787    }
8788
8789    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
8790        int32_t ispSensitivity =
8791            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
8792        if (ispSensitivity <
8793            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
8794                ispSensitivity =
8795                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8796                LOGD("clamp ispSensitivity to %d", ispSensitivity);
8797        }
8798        if (ispSensitivity >
8799            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
8800                ispSensitivity =
8801                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
8802                LOGD("clamp ispSensitivity to %d", ispSensitivity);
8803        }
8804        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
8805                ispSensitivity)) {
8806            rc = BAD_VALUE;
8807        }
8808    }
8809
8810    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8811        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8812        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8813            rc = BAD_VALUE;
8814        }
8815    }
8816
8817    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8818        uint8_t fwk_facedetectMode =
8819                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8820
8821        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8822                fwk_facedetectMode);
8823
8824        if (NAME_NOT_FOUND != val) {
8825            uint8_t facedetectMode = (uint8_t)val;
8826            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8827                    facedetectMode)) {
8828                rc = BAD_VALUE;
8829            }
8830        }
8831    }
8832
8833    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8834        uint8_t histogramMode =
8835                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8836        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8837                histogramMode)) {
8838            rc = BAD_VALUE;
8839        }
8840    }
8841
8842    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8843        uint8_t sharpnessMapMode =
8844                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8845        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8846                sharpnessMapMode)) {
8847            rc = BAD_VALUE;
8848        }
8849    }
8850
8851    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8852        uint8_t tonemapMode =
8853                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8854        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8855            rc = BAD_VALUE;
8856        }
8857    }
8858    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8859    /*All tonemap channels will have the same number of points*/
8860    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8861        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8862        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8863        cam_rgb_tonemap_curves tonemapCurves;
8864        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8865        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8866            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
8867                     tonemapCurves.tonemap_points_cnt,
8868                    CAM_MAX_TONEMAP_CURVE_SIZE);
8869            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8870        }
8871
8872        /* ch0 = G*/
8873        size_t point = 0;
8874        cam_tonemap_curve_t tonemapCurveGreen;
8875        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8876            for (size_t j = 0; j < 2; j++) {
8877               tonemapCurveGreen.tonemap_points[i][j] =
8878                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8879               point++;
8880            }
8881        }
8882        tonemapCurves.curves[0] = tonemapCurveGreen;
8883
8884        /* ch 1 = B */
8885        point = 0;
8886        cam_tonemap_curve_t tonemapCurveBlue;
8887        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8888            for (size_t j = 0; j < 2; j++) {
8889               tonemapCurveBlue.tonemap_points[i][j] =
8890                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8891               point++;
8892            }
8893        }
8894        tonemapCurves.curves[1] = tonemapCurveBlue;
8895
8896        /* ch 2 = R */
8897        point = 0;
8898        cam_tonemap_curve_t tonemapCurveRed;
8899        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8900            for (size_t j = 0; j < 2; j++) {
8901               tonemapCurveRed.tonemap_points[i][j] =
8902                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8903               point++;
8904            }
8905        }
8906        tonemapCurves.curves[2] = tonemapCurveRed;
8907
8908        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8909                tonemapCurves)) {
8910            rc = BAD_VALUE;
8911        }
8912    }
8913
8914    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8915        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8916        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8917                captureIntent)) {
8918            rc = BAD_VALUE;
8919        }
8920    }
8921
8922    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8923        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8924        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8925                blackLevelLock)) {
8926            rc = BAD_VALUE;
8927        }
8928    }
8929
8930    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8931        uint8_t lensShadingMapMode =
8932                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8933        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8934                lensShadingMapMode)) {
8935            rc = BAD_VALUE;
8936        }
8937    }
8938
8939    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8940        cam_area_t roi;
8941        bool reset = true;
8942        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8943
8944        // Map coordinate system from active array to sensor output.
8945        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8946                roi.rect.height);
8947
8948        if (scalerCropSet) {
8949            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8950        }
8951        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8952            rc = BAD_VALUE;
8953        }
8954    }
8955
8956    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8957        cam_area_t roi;
8958        bool reset = true;
8959        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8960
8961        // Map coordinate system from active array to sensor output.
8962        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8963                roi.rect.height);
8964
8965        if (scalerCropSet) {
8966            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8967        }
8968        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8969            rc = BAD_VALUE;
8970        }
8971    }
8972
8973    // CDS for non-HFR non-video mode
8974    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8975            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
8976        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8977        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8978            LOGE("Invalid CDS mode %d!", *fwk_cds);
8979        } else {
8980            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8981                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8982                rc = BAD_VALUE;
8983            }
8984        }
8985    }
8986
8987    // TNR
8988    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8989        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8990        uint8_t b_TnrRequested = 0;
8991        cam_denoise_param_t tnr;
8992        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8993        tnr.process_plates =
8994            (cam_denoise_process_type_t)frame_settings.find(
8995            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8996        b_TnrRequested = tnr.denoise_enable;
8997        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8998            rc = BAD_VALUE;
8999        }
9000    }
9001
9002    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9003        int32_t fwk_testPatternMode =
9004                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9005        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9006                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9007
9008        if (NAME_NOT_FOUND != testPatternMode) {
9009            cam_test_pattern_data_t testPatternData;
9010            memset(&testPatternData, 0, sizeof(testPatternData));
9011            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9012            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9013                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9014                int32_t *fwk_testPatternData =
9015                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9016                testPatternData.r = fwk_testPatternData[0];
9017                testPatternData.b = fwk_testPatternData[3];
9018                switch (gCamCapability[mCameraId]->color_arrangement) {
9019                    case CAM_FILTER_ARRANGEMENT_RGGB:
9020                    case CAM_FILTER_ARRANGEMENT_GRBG:
9021                        testPatternData.gr = fwk_testPatternData[1];
9022                        testPatternData.gb = fwk_testPatternData[2];
9023                        break;
9024                    case CAM_FILTER_ARRANGEMENT_GBRG:
9025                    case CAM_FILTER_ARRANGEMENT_BGGR:
9026                        testPatternData.gr = fwk_testPatternData[2];
9027                        testPatternData.gb = fwk_testPatternData[1];
9028                        break;
9029                    default:
9030                        LOGE("color arrangement %d is not supported",
9031                                gCamCapability[mCameraId]->color_arrangement);
9032                        break;
9033                }
9034            }
9035            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9036                    testPatternData)) {
9037                rc = BAD_VALUE;
9038            }
9039        } else {
9040            LOGE("Invalid framework sensor test pattern mode %d",
9041                    fwk_testPatternMode);
9042        }
9043    }
9044
9045    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9046        size_t count = 0;
9047        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9048        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9049                gps_coords.data.d, gps_coords.count, count);
9050        if (gps_coords.count != count) {
9051            rc = BAD_VALUE;
9052        }
9053    }
9054
9055    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9056        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9057        size_t count = 0;
9058        const char *gps_methods_src = (const char *)
9059                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9060        memset(gps_methods, '\0', sizeof(gps_methods));
9061        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9062        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9063                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9064        if (GPS_PROCESSING_METHOD_SIZE != count) {
9065            rc = BAD_VALUE;
9066        }
9067    }
9068
9069    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9070        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9071        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9072                gps_timestamp)) {
9073            rc = BAD_VALUE;
9074        }
9075    }
9076
9077    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9078        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9079        cam_rotation_info_t rotation_info;
9080        if (orientation == 0) {
9081           rotation_info.rotation = ROTATE_0;
9082        } else if (orientation == 90) {
9083           rotation_info.rotation = ROTATE_90;
9084        } else if (orientation == 180) {
9085           rotation_info.rotation = ROTATE_180;
9086        } else if (orientation == 270) {
9087           rotation_info.rotation = ROTATE_270;
9088        }
9089        rotation_info.streamId = snapshotStreamId;
9090        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9091        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9092            rc = BAD_VALUE;
9093        }
9094    }
9095
9096    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9097        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9098        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9099            rc = BAD_VALUE;
9100        }
9101    }
9102
9103    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9104        uint32_t thumb_quality = (uint32_t)
9105                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9106        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9107                thumb_quality)) {
9108            rc = BAD_VALUE;
9109        }
9110    }
9111
9112    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9113        cam_dimension_t dim;
9114        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9115        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9116        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9117            rc = BAD_VALUE;
9118        }
9119    }
9120
9121    // Internal metadata
9122    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9123        size_t count = 0;
9124        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9125        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9126                privatedata.data.i32, privatedata.count, count);
9127        if (privatedata.count != count) {
9128            rc = BAD_VALUE;
9129        }
9130    }
9131
9132    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
9133        uint8_t* use_av_timer =
9134                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
9135        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
9136            rc = BAD_VALUE;
9137        }
9138    }
9139
9140    // EV step
9141    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9142            gCamCapability[mCameraId]->exp_compensation_step)) {
9143        rc = BAD_VALUE;
9144    }
9145
9146    // CDS info
9147    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9148        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9149                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9150
9151        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9152                CAM_INTF_META_CDS_DATA, *cdsData)) {
9153            rc = BAD_VALUE;
9154        }
9155    }
9156
9157    // Hybrid AE
9158    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9159        uint8_t *hybrid_ae = (uint8_t *)
9160                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9161
9162        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9163                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9164            rc = BAD_VALUE;
9165        }
9166    }
9167
9168    return rc;
9169}
9170
9171/*===========================================================================
9172 * FUNCTION   : captureResultCb
9173 *
9174 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9175 *
9176 * PARAMETERS :
9177 *   @frame  : frame information from mm-camera-interface
9178 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9179 *   @userdata: userdata
9180 *
9181 * RETURN     : NONE
9182 *==========================================================================*/
9183void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9184                camera3_stream_buffer_t *buffer,
9185                uint32_t frame_number, bool isInputBuffer, void *userdata)
9186{
9187    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9188    if (hw == NULL) {
9189        LOGE("Invalid hw %p", hw);
9190        return;
9191    }
9192
9193    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9194    return;
9195}
9196
9197
9198/*===========================================================================
9199 * FUNCTION   : initialize
9200 *
9201 * DESCRIPTION: Pass framework callback pointers to HAL
9202 *
9203 * PARAMETERS :
9204 *
9205 *
9206 * RETURN     : Success : 0
9207 *              Failure: -ENODEV
9208 *==========================================================================*/
9209
9210int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9211                                  const camera3_callback_ops_t *callback_ops)
9212{
9213    LOGD("E");
9214    QCamera3HardwareInterface *hw =
9215        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9216    if (!hw) {
9217        LOGE("NULL camera device");
9218        return -ENODEV;
9219    }
9220
9221    int rc = hw->initialize(callback_ops);
9222    LOGD("X");
9223    return rc;
9224}
9225
9226/*===========================================================================
9227 * FUNCTION   : configure_streams
9228 *
9229 * DESCRIPTION:
9230 *
9231 * PARAMETERS :
9232 *
9233 *
9234 * RETURN     : Success: 0
9235 *              Failure: -EINVAL (if stream configuration is invalid)
9236 *                       -ENODEV (fatal error)
9237 *==========================================================================*/
9238
9239int QCamera3HardwareInterface::configure_streams(
9240        const struct camera3_device *device,
9241        camera3_stream_configuration_t *stream_list)
9242{
9243    LOGD("E");
9244    QCamera3HardwareInterface *hw =
9245        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9246    if (!hw) {
9247        LOGE("NULL camera device");
9248        return -ENODEV;
9249    }
9250    int rc = hw->configureStreams(stream_list);
9251    LOGD("X");
9252    return rc;
9253}
9254
9255/*===========================================================================
9256 * FUNCTION   : construct_default_request_settings
9257 *
9258 * DESCRIPTION: Configure a settings buffer to meet the required use case
9259 *
9260 * PARAMETERS :
9261 *
9262 *
9263 * RETURN     : Success: Return valid metadata
9264 *              Failure: Return NULL
9265 *==========================================================================*/
9266const camera_metadata_t* QCamera3HardwareInterface::
9267    construct_default_request_settings(const struct camera3_device *device,
9268                                        int type)
9269{
9270
9271    LOGD("E");
9272    camera_metadata_t* fwk_metadata = NULL;
9273    QCamera3HardwareInterface *hw =
9274        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9275    if (!hw) {
9276        LOGE("NULL camera device");
9277        return NULL;
9278    }
9279
9280    fwk_metadata = hw->translateCapabilityToMetadata(type);
9281
9282    LOGD("X");
9283    return fwk_metadata;
9284}
9285
9286/*===========================================================================
9287 * FUNCTION   : process_capture_request
9288 *
9289 * DESCRIPTION:
9290 *
9291 * PARAMETERS :
9292 *
9293 *
9294 * RETURN     :
9295 *==========================================================================*/
9296int QCamera3HardwareInterface::process_capture_request(
9297                    const struct camera3_device *device,
9298                    camera3_capture_request_t *request)
9299{
9300    LOGD("E");
9301    QCamera3HardwareInterface *hw =
9302        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9303    if (!hw) {
9304        LOGE("NULL camera device");
9305        return -EINVAL;
9306    }
9307
9308    int rc = hw->processCaptureRequest(request);
9309    LOGD("X");
9310    return rc;
9311}
9312
9313/*===========================================================================
9314 * FUNCTION   : dump
9315 *
9316 * DESCRIPTION:
9317 *
9318 * PARAMETERS :
9319 *
9320 *
9321 * RETURN     :
9322 *==========================================================================*/
9323
9324void QCamera3HardwareInterface::dump(
9325                const struct camera3_device *device, int fd)
9326{
9327    /* Log level property is read when "adb shell dumpsys media.camera" is
9328       called so that the log level can be controlled without restarting
9329       the media server */
9330    getLogLevel();
9331
9332    LOGD("E");
9333    QCamera3HardwareInterface *hw =
9334        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9335    if (!hw) {
9336        LOGE("NULL camera device");
9337        return;
9338    }
9339
9340    hw->dump(fd);
9341    LOGD("X");
9342    return;
9343}
9344
9345/*===========================================================================
9346 * FUNCTION   : flush
9347 *
9348 * DESCRIPTION:
9349 *
9350 * PARAMETERS :
9351 *
9352 *
9353 * RETURN     :
9354 *==========================================================================*/
9355
9356int QCamera3HardwareInterface::flush(
9357                const struct camera3_device *device)
9358{
9359    int rc;
9360    LOGD("E");
9361    QCamera3HardwareInterface *hw =
9362        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9363    if (!hw) {
9364        LOGE("NULL camera device");
9365        return -EINVAL;
9366    }
9367
9368    pthread_mutex_lock(&hw->mMutex);
9369    // Validate current state
9370    switch (hw->mState) {
9371        case STARTED:
9372            /* valid state */
9373            break;
9374
9375        case ERROR:
9376            pthread_mutex_unlock(&hw->mMutex);
9377            hw->handleCameraDeviceError();
9378            return -ENODEV;
9379
9380        default:
9381            LOGI("Flush returned during state %d", hw->mState);
9382            pthread_mutex_unlock(&hw->mMutex);
9383            return 0;
9384    }
9385    pthread_mutex_unlock(&hw->mMutex);
9386
9387    rc = hw->flush(true /* restart channels */ );
9388    LOGD("X");
9389    return rc;
9390}
9391
9392/*===========================================================================
9393 * FUNCTION   : close_camera_device
9394 *
9395 * DESCRIPTION:
9396 *
9397 * PARAMETERS :
9398 *
9399 *
9400 * RETURN     :
9401 *==========================================================================*/
9402int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9403{
9404    int ret = NO_ERROR;
9405    QCamera3HardwareInterface *hw =
9406        reinterpret_cast<QCamera3HardwareInterface *>(
9407            reinterpret_cast<camera3_device_t *>(device)->priv);
9408    if (!hw) {
9409        LOGE("NULL camera device");
9410        return BAD_VALUE;
9411    }
9412
9413    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
9414    delete hw;
9415    LOGI("[KPI Perf]: X");
9416    return ret;
9417}
9418
9419/*===========================================================================
9420 * FUNCTION   : getWaveletDenoiseProcessPlate
9421 *
9422 * DESCRIPTION: query wavelet denoise process plate
9423 *
9424 * PARAMETERS : None
9425 *
9426 * RETURN     : WNR prcocess plate value
9427 *==========================================================================*/
9428cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9429{
9430    char prop[PROPERTY_VALUE_MAX];
9431    memset(prop, 0, sizeof(prop));
9432    property_get("persist.denoise.process.plates", prop, "0");
9433    int processPlate = atoi(prop);
9434    switch(processPlate) {
9435    case 0:
9436        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9437    case 1:
9438        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9439    case 2:
9440        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9441    case 3:
9442        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9443    default:
9444        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9445    }
9446}
9447
9448
9449/*===========================================================================
9450 * FUNCTION   : getTemporalDenoiseProcessPlate
9451 *
9452 * DESCRIPTION: query temporal denoise process plate
9453 *
9454 * PARAMETERS : None
9455 *
9456 * RETURN     : TNR prcocess plate value
9457 *==========================================================================*/
9458cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9459{
9460    char prop[PROPERTY_VALUE_MAX];
9461    memset(prop, 0, sizeof(prop));
9462    property_get("persist.tnr.process.plates", prop, "0");
9463    int processPlate = atoi(prop);
9464    switch(processPlate) {
9465    case 0:
9466        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9467    case 1:
9468        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9469    case 2:
9470        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9471    case 3:
9472        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9473    default:
9474        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9475    }
9476}
9477
9478
9479/*===========================================================================
9480 * FUNCTION   : extractSceneMode
9481 *
9482 * DESCRIPTION: Extract scene mode from frameworks set metadata
9483 *
9484 * PARAMETERS :
9485 *      @frame_settings: CameraMetadata reference
9486 *      @metaMode: ANDROID_CONTORL_MODE
9487 *      @hal_metadata: hal metadata structure
9488 *
9489 * RETURN     : None
9490 *==========================================================================*/
9491int32_t QCamera3HardwareInterface::extractSceneMode(
9492        const CameraMetadata &frame_settings, uint8_t metaMode,
9493        metadata_buffer_t *hal_metadata)
9494{
9495    int32_t rc = NO_ERROR;
9496
9497    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9498        camera_metadata_ro_entry entry =
9499                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9500        if (0 == entry.count)
9501            return rc;
9502
9503        uint8_t fwk_sceneMode = entry.data.u8[0];
9504
9505        int val = lookupHalName(SCENE_MODES_MAP,
9506                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9507                fwk_sceneMode);
9508        if (NAME_NOT_FOUND != val) {
9509            uint8_t sceneMode = (uint8_t)val;
9510            LOGD("sceneMode: %d", sceneMode);
9511            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9512                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9513                rc = BAD_VALUE;
9514            }
9515        }
9516    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9517            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9518        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9519        LOGD("sceneMode: %d", sceneMode);
9520        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9521                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9522            rc = BAD_VALUE;
9523        }
9524    }
9525    return rc;
9526}
9527
9528/*===========================================================================
9529 * FUNCTION   : needRotationReprocess
9530 *
9531 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9532 *
9533 * PARAMETERS : none
9534 *
9535 * RETURN     : true: needed
9536 *              false: no need
9537 *==========================================================================*/
9538bool QCamera3HardwareInterface::needRotationReprocess()
9539{
9540    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9541        // current rotation is not zero, and pp has the capability to process rotation
9542        LOGH("need do reprocess for rotation");
9543        return true;
9544    }
9545
9546    return false;
9547}
9548
9549/*===========================================================================
9550 * FUNCTION   : needReprocess
9551 *
9552 * DESCRIPTION: if reprocess in needed
9553 *
9554 * PARAMETERS : none
9555 *
9556 * RETURN     : true: needed
9557 *              false: no need
9558 *==========================================================================*/
9559bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9560{
9561    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
9562        // TODO: add for ZSL HDR later
9563        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9564        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9565            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
9566            return true;
9567        } else {
9568            LOGH("already post processed frame");
9569            return false;
9570        }
9571    }
9572    return needRotationReprocess();
9573}
9574
9575/*===========================================================================
9576 * FUNCTION   : needJpegExifRotation
9577 *
9578 * DESCRIPTION: if rotation from jpeg is needed
9579 *
9580 * PARAMETERS : none
9581 *
9582 * RETURN     : true: needed
9583 *              false: no need
9584 *==========================================================================*/
9585bool QCamera3HardwareInterface::needJpegExifRotation()
9586{
9587   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9588    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9589       LOGD("Need use Jpeg EXIF Rotation");
9590       return true;
9591    }
9592    return false;
9593}
9594
9595/*===========================================================================
9596 * FUNCTION   : addOfflineReprocChannel
9597 *
9598 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9599 *              coming from input channel
9600 *
9601 * PARAMETERS :
9602 *   @config  : reprocess configuration
9603 *   @inputChHandle : pointer to the input (source) channel
9604 *
9605 *
9606 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
9607 *==========================================================================*/
9608QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9609        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9610{
9611    int32_t rc = NO_ERROR;
9612    QCamera3ReprocessChannel *pChannel = NULL;
9613
9614    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9615            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9616            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9617    if (NULL == pChannel) {
9618        LOGE("no mem for reprocess channel");
9619        return NULL;
9620    }
9621
9622    rc = pChannel->initialize(IS_TYPE_NONE);
9623    if (rc != NO_ERROR) {
9624        LOGE("init reprocess channel failed, ret = %d", rc);
9625        delete pChannel;
9626        return NULL;
9627    }
9628
9629    // pp feature config
9630    cam_pp_feature_config_t pp_config;
9631    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9632
9633    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9634    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
9635            & CAM_QCOM_FEATURE_DSDN) {
9636        //Use CPP CDS incase h/w supports it.
9637        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
9638        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
9639    }
9640    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9641        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
9642    }
9643
9644    rc = pChannel->addReprocStreamsFromSource(pp_config,
9645            config,
9646            IS_TYPE_NONE,
9647            mMetadataChannel);
9648
9649    if (rc != NO_ERROR) {
9650        delete pChannel;
9651        return NULL;
9652    }
9653    return pChannel;
9654}
9655
9656/*===========================================================================
9657 * FUNCTION   : getMobicatMask
9658 *
9659 * DESCRIPTION: returns mobicat mask
9660 *
9661 * PARAMETERS : none
9662 *
9663 * RETURN     : mobicat mask
9664 *
9665 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Value cached by setMobicat() from the persist.camera.mobicat property.
    return m_MobicatMask;
}
9670
9671/*===========================================================================
9672 * FUNCTION   : setMobicat
9673 *
9674 * DESCRIPTION: set Mobicat on/off.
9675 *
9676 * PARAMETERS :
9677 *   @params  : none
9678 *
9679 * RETURN     : int32_t type of status
9680 *              NO_ERROR  -- success
9681 *              none-zero failure code
9682 *==========================================================================*/
9683int32_t QCamera3HardwareInterface::setMobicat()
9684{
9685    char value [PROPERTY_VALUE_MAX];
9686    property_get("persist.camera.mobicat", value, "0");
9687    int32_t ret = NO_ERROR;
9688    uint8_t enableMobi = (uint8_t)atoi(value);
9689
9690    if (enableMobi) {
9691        tune_cmd_t tune_cmd;
9692        tune_cmd.type = SET_RELOAD_CHROMATIX;
9693        tune_cmd.module = MODULE_ALL;
9694        tune_cmd.value = TRUE;
9695        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9696                CAM_INTF_PARM_SET_VFE_COMMAND,
9697                tune_cmd);
9698
9699        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9700                CAM_INTF_PARM_SET_PP_COMMAND,
9701                tune_cmd);
9702    }
9703    m_MobicatMask = enableMobi;
9704
9705    return ret;
9706}
9707
9708/*===========================================================================
9709* FUNCTION   : getLogLevel
9710*
9711* DESCRIPTION: Reads the log level property into a variable
9712*
9713* PARAMETERS :
9714*   None
9715*
9716* RETURN     :
9717*   None
9718*==========================================================================*/
9719void QCamera3HardwareInterface::getLogLevel()
9720{
9721    char prop[PROPERTY_VALUE_MAX];
9722    uint32_t globalLogLevel = 0;
9723
9724    property_get("persist.camera.hal.debug", prop, "0");
9725    int val = atoi(prop);
9726    if (0 <= val) {
9727        gCamHal3LogLevel = (uint32_t)val;
9728    }
9729
9730    property_get("persist.camera.kpi.debug", prop, "1");
9731    gKpiDebugLevel = atoi(prop);
9732
9733    property_get("persist.camera.global.debug", prop, "0");
9734    val = atoi(prop);
9735    if (0 <= val) {
9736        globalLogLevel = (uint32_t)val;
9737    }
9738
9739    /* Highest log level among hal.logs and global.logs is selected */
9740    if (gCamHal3LogLevel < globalLogLevel)
9741        gCamHal3LogLevel = globalLogLevel;
9742
9743    return;
9744}
9745
9746/*===========================================================================
9747 * FUNCTION   : validateStreamRotations
9748 *
9749 * DESCRIPTION: Check if the rotations requested are supported
9750 *
9751 * PARAMETERS :
9752 *   @stream_list : streams to be configured
9753 *
9754 * RETURN     : NO_ERROR on success
9755 *              -EINVAL on failure
9756 *
9757 *==========================================================================*/
9758int QCamera3HardwareInterface::validateStreamRotations(
9759        camera3_stream_configuration_t *streamList)
9760{
9761    int rc = NO_ERROR;
9762
9763    /*
9764    * Loop through all streams requested in configuration
9765    * Check if unsupported rotations have been requested on any of them
9766    */
9767    for (size_t j = 0; j < streamList->num_streams; j++){
9768        camera3_stream_t *newStream = streamList->streams[j];
9769
9770        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9771        bool isImplDef = (newStream->format ==
9772                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9773        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9774                isImplDef);
9775
9776        if (isRotated && (!isImplDef || isZsl)) {
9777            LOGE("Error: Unsupported rotation of %d requested for stream"
9778                    "type:%d and stream format:%d",
9779                    newStream->rotation, newStream->stream_type,
9780                    newStream->format);
9781            rc = -EINVAL;
9782            break;
9783        }
9784    }
9785
9786    return rc;
9787}
9788
9789/*===========================================================================
9790* FUNCTION   : getFlashInfo
9791*
9792* DESCRIPTION: Retrieve information about whether the device has a flash.
9793*
9794* PARAMETERS :
9795*   @cameraId  : Camera id to query
9796*   @hasFlash  : Boolean indicating whether there is a flash device
9797*                associated with given camera
9798*   @flashNode : If a flash device exists, this will be its device node.
9799*
9800* RETURN     :
9801*   None
9802*==========================================================================*/
9803void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9804        bool& hasFlash,
9805        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9806{
9807    cam_capability_t* camCapability = gCamCapability[cameraId];
9808    if (NULL == camCapability) {
9809        hasFlash = false;
9810        flashNode[0] = '\0';
9811    } else {
9812        hasFlash = camCapability->flash_available;
9813        strlcpy(flashNode,
9814                (char*)camCapability->flash_dev_name,
9815                QCAMERA_MAX_FILEPATH_LENGTH);
9816    }
9817}
9818
9819/*===========================================================================
9820* FUNCTION   : getEepromVersionInfo
9821*
9822* DESCRIPTION: Retrieve version info of the sensor EEPROM data
9823*
9824* PARAMETERS : None
9825*
9826* RETURN     : string describing EEPROM version
9827*              "\0" if no such info available
9828*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns the EEPROM version string straight from the cached
    // capability table; an empty string means no info is available.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
9833
9834/*===========================================================================
9835* FUNCTION   : getLdafCalib
9836*
9837* DESCRIPTION: Retrieve Laser AF calibration data
9838*
9839* PARAMETERS : None
9840*
9841* RETURN     : Two uint32_t describing laser AF calibration data
9842*              NULL if none is available.
9843*==========================================================================*/
9844const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9845{
9846    if (mLdafCalibExist) {
9847        return &mLdafCalib[0];
9848    } else {
9849        return NULL;
9850    }
9851}
9852
9853/*===========================================================================
9854 * FUNCTION   : dynamicUpdateMetaStreamInfo
9855 *
9856 * DESCRIPTION: This function:
9857 *             (1) stops all the channels
9858 *             (2) returns error on pending requests and buffers
9859 *             (3) sends metastream_info in setparams
9860 *             (4) starts all channels
9861 *             This is useful when sensor has to be restarted to apply any
9862 *             settings such as frame rate from a different sensor mode
9863 *
9864 * PARAMETERS : None
9865 *
9866 * RETURN     : NO_ERROR on success
9867 *              Error codes on failure
9868 *
9869 *==========================================================================*/
9870int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
9871{
9872    ATRACE_CALL();
9873    int rc = NO_ERROR;
9874
9875    LOGD("E");
9876
9877    rc = stopAllChannels();
9878    if (rc < 0) {
9879        LOGE("stopAllChannels failed");
9880        return rc;
9881    }
9882
9883    rc = notifyErrorForPendingRequests();
9884    if (rc < 0) {
9885        LOGE("notifyErrorForPendingRequests failed");
9886        return rc;
9887    }
9888
9889    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
9890        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
9891                "Format:%d",
9892                mStreamConfigInfo.type[i],
9893                mStreamConfigInfo.stream_sizes[i].width,
9894                mStreamConfigInfo.stream_sizes[i].height,
9895                mStreamConfigInfo.postprocess_mask[i],
9896                mStreamConfigInfo.format[i]);
9897    }
9898
9899    /* Send meta stream info once again so that ISP can start */
9900    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9901            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
9902    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
9903            mParameters);
9904    if (rc < 0) {
9905        LOGE("set Metastreaminfo failed. Sensor mode does not change");
9906    }
9907
9908    rc = startAllChannels();
9909    if (rc < 0) {
9910        LOGE("startAllChannels failed");
9911        return rc;
9912    }
9913
9914    LOGD("X");
9915    return rc;
9916}
9917
9918/*===========================================================================
9919 * FUNCTION   : stopAllChannels
9920 *
9921 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9922 *
9923 * PARAMETERS : None
9924 *
9925 * RETURN     : NO_ERROR on success
9926 *              Error codes on failure
9927 *
9928 *==========================================================================*/
9929int32_t QCamera3HardwareInterface::stopAllChannels()
9930{
9931    int32_t rc = NO_ERROR;
9932
9933    LOGD("Stopping all channels");
9934    // Stop the Streams/Channels
9935    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9936        it != mStreamInfo.end(); it++) {
9937        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9938        if (channel) {
9939            channel->stop();
9940        }
9941        (*it)->status = INVALID;
9942    }
9943
9944    if (mSupportChannel) {
9945        mSupportChannel->stop();
9946    }
9947    if (mAnalysisChannel) {
9948        mAnalysisChannel->stop();
9949    }
9950    if (mRawDumpChannel) {
9951        mRawDumpChannel->stop();
9952    }
9953    if (mMetadataChannel) {
9954        /* If content of mStreamInfo is not 0, there is metadata stream */
9955        mMetadataChannel->stop();
9956    }
9957
9958    LOGD("All channels stopped");
9959    return rc;
9960}
9961
9962/*===========================================================================
9963 * FUNCTION   : startAllChannels
9964 *
9965 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9966 *
9967 * PARAMETERS : None
9968 *
9969 * RETURN     : NO_ERROR on success
9970 *              Error codes on failure
9971 *
9972 *==========================================================================*/
9973int32_t QCamera3HardwareInterface::startAllChannels()
9974{
9975    int32_t rc = NO_ERROR;
9976
9977    LOGD("Start all channels ");
9978    // Start the Streams/Channels
9979    if (mMetadataChannel) {
9980        /* If content of mStreamInfo is not 0, there is metadata stream */
9981        rc = mMetadataChannel->start();
9982        if (rc < 0) {
9983            LOGE("META channel start failed");
9984            return rc;
9985        }
9986    }
9987    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9988        it != mStreamInfo.end(); it++) {
9989        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9990        if (channel) {
9991            rc = channel->start();
9992            if (rc < 0) {
9993                LOGE("channel start failed");
9994                return rc;
9995            }
9996        }
9997    }
9998    if (mAnalysisChannel) {
9999        mAnalysisChannel->start();
10000    }
10001    if (mSupportChannel) {
10002        rc = mSupportChannel->start();
10003        if (rc < 0) {
10004            LOGE("Support channel start failed");
10005            return rc;
10006        }
10007    }
10008    if (mRawDumpChannel) {
10009        rc = mRawDumpChannel->start();
10010        if (rc < 0) {
10011            LOGE("RAW dump channel start failed");
10012            return rc;
10013        }
10014    }
10015
10016    LOGD("All channels started");
10017    return rc;
10018}
10019
10020/*===========================================================================
10021 * FUNCTION   : notifyErrorForPendingRequests
10022 *
10023 * DESCRIPTION: This function sends error for all the pending requests/buffers
10024 *
10025 * PARAMETERS : None
10026 *
10027 * RETURN     : Error codes
10028 *              NO_ERROR on success
10029 *
10030 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest frame number that still has a pending request.
    // Buffers tracked for frames OLDER than this belong to requests whose
    // metadata was already delivered, so they get ERROR_BUFFER; everything
    // at/after it gets ERROR_REQUEST in the else branch below.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    // Walk every tracked request; each iteration erases the current entry,
    // so the loop advances via the iterator returned by erase(), not ++.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            // NOTE(review): plain operator new[] throws/aborts on failure
            // instead of returning NULL, so this check is effectively dead;
            // new (std::nothrow) would be required for it to ever trigger.
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            // Buffer-only error result: no metadata (result.result == NULL),
            // just the failed output buffers for this frame.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // Per-buffer ERROR_BUFFER notify must reach the framework
                // before the matching capture result is posted below.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            // Deliver the error result after the notifies; the framework
            // reclaims the buffers listed in result.output_buffers.
            mCallbackOps->process_capture_result(mCallbackOps, &result);

            // output_buffers was only borrowed by the callback; safe to free.
            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            // One ERROR_REQUEST notify covers the whole request (stream NULL),
            // unlike the per-buffer ERROR_BUFFER notifies above.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            // NOTE(review): dead check — see matching comment in the
            // ERROR_BUFFER branch above.
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // NOTE(review): i is dereferenced without checking against
            // mPendingRequestsList.end(); this branch appears reachable only
            // while the request list is non-empty (frameNum != UINT_MAX),
            // but that invariant is implicit — confirm with callers.
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            // Retire the matching pending request entry as well.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive drain: the loop above normally empties these, but clear any
    // stragglers so the tracker is guaranteed empty on return.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
10169
10170bool QCamera3HardwareInterface::isOnEncoder(
10171        const cam_dimension_t max_viewfinder_size,
10172        uint32_t width, uint32_t height)
10173{
10174    return (width > (uint32_t)max_viewfinder_size.width ||
10175            height > (uint32_t)max_viewfinder_size.height);
10176}
10177
10178/*===========================================================================
10179 * FUNCTION   : setBundleInfo
10180 *
10181 * DESCRIPTION: Set bundle info for all streams that are bundle.
10182 *
10183 * PARAMETERS : None
10184 *
10185 * RETURN     : NO_ERROR on success
10186 *              Error codes on failure
10187 *==========================================================================*/
10188int32_t QCamera3HardwareInterface::setBundleInfo()
10189{
10190    int32_t rc = NO_ERROR;
10191
10192    if (mChannelHandle) {
10193        cam_bundle_config_t bundleInfo;
10194        memset(&bundleInfo, 0, sizeof(bundleInfo));
10195        rc = mCameraHandle->ops->get_bundle_info(
10196                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10197        if (rc != NO_ERROR) {
10198            LOGE("get_bundle_info failed");
10199            return rc;
10200        }
10201        if (mAnalysisChannel) {
10202            mAnalysisChannel->setBundleInfo(bundleInfo);
10203        }
10204        if (mSupportChannel) {
10205            mSupportChannel->setBundleInfo(bundleInfo);
10206        }
10207        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10208                it != mStreamInfo.end(); it++) {
10209            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10210            channel->setBundleInfo(bundleInfo);
10211        }
10212        if (mRawDumpChannel) {
10213            mRawDumpChannel->setBundleInfo(bundleInfo);
10214        }
10215    }
10216
10217    return rc;
10218}
10219
10220/*===========================================================================
10221 * FUNCTION   : get_num_overall_buffers
10222 *
10223 * DESCRIPTION: Estimate number of pending buffers across all requests.
10224 *
10225 * PARAMETERS : None
10226 *
10227 * RETURN     : Number of overall pending buffers
10228 *
10229 *==========================================================================*/
10230uint32_t PendingBuffersMap::get_num_overall_buffers()
10231{
10232    uint32_t sum_buffers = 0;
10233    for (auto &req : mPendingBuffersInRequest) {
10234        sum_buffers += req.mPendingBufferList.size();
10235    }
10236    return sum_buffers;
10237}
10238
10239/*===========================================================================
10240 * FUNCTION   : removeBuf
10241 *
10242 * DESCRIPTION: Remove a matching buffer from tracker.
10243 *
10244 * PARAMETERS : @buffer: image buffer for the callback
10245 *
10246 * RETURN     : None
10247 *
10248 *==========================================================================*/
10249void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
10250{
10251    bool buffer_found = false;
10252    for (auto req = mPendingBuffersInRequest.begin();
10253            req != mPendingBuffersInRequest.end(); req++) {
10254        for (auto k = req->mPendingBufferList.begin();
10255                k != req->mPendingBufferList.end(); k++ ) {
10256            if (k->buffer == buffer) {
10257                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
10258                        req->frame_number, buffer);
10259                k = req->mPendingBufferList.erase(k);
10260                if (req->mPendingBufferList.empty()) {
10261                    // Remove this request from Map
10262                    req = mPendingBuffersInRequest.erase(req);
10263                }
10264                buffer_found = true;
10265                break;
10266            }
10267        }
10268        if (buffer_found) {
10269            break;
10270        }
10271    }
10272    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
10273            get_num_overall_buffers());
10274}
10275
10276}; //end namespace qcamera
10277