QCamera3HWI.cpp revision 3b3a1c613f3049a13bd4a9d4e6efb2f99520eb9c
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <time.h>
44#include <sync/sync.h>
45#include "gralloc_priv.h"
46
47// Display dependencies
48#include "qdMetaData.h"
49
50// Camera dependencies
51#include "android/QCamera3External.h"
52#include "util/QCameraFlash.h"
53#include "QCamera3HWI.h"
54#include "QCamera3VendorTags.h"
55#include "QCameraTrace.h"
56
57extern "C" {
58#include "mm_camera_dbg.h"
59}
60
61using namespace android;
62
63namespace qcamera {
64
65#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
66
67#define EMPTY_PIPELINE_DELAY 2
68#define PARTIAL_RESULT_COUNT 2
69#define FRAME_SKIP_DELAY     0
70
71#define MAX_VALUE_8BIT ((1<<8)-1)
72#define MAX_VALUE_10BIT ((1<<10)-1)
73#define MAX_VALUE_12BIT ((1<<12)-1)
74
75#define VIDEO_4K_WIDTH  3840
76#define VIDEO_4K_HEIGHT 2160
77
78#define MAX_EIS_WIDTH 3840
79#define MAX_EIS_HEIGHT 2160
80
81#define MAX_RAW_STREAMS        1
82#define MAX_STALLING_STREAMS   1
83#define MAX_PROCESSED_STREAMS  3
84/* Batch mode is enabled only if FPS set is equal to or greater than this */
85#define MIN_FPS_FOR_BATCH_MODE (120)
86#define PREVIEW_FPS_FOR_HFR    (30)
87#define DEFAULT_VIDEO_FPS      (30.0)
88#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
89#define MAX_HFR_BATCH_SIZE     (8)
90#define REGIONS_TUPLE_COUNT    5
91#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
92// Set a threshold for detection of missing buffers //seconds
93#define MISSING_REQUEST_BUF_TIMEOUT 3
94#define FLUSH_TIMEOUT 3
95#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
96
97#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
98                                              CAM_QCOM_FEATURE_CROP |\
99                                              CAM_QCOM_FEATURE_ROTATION |\
100                                              CAM_QCOM_FEATURE_SHARPNESS |\
101                                              CAM_QCOM_FEATURE_SCALE |\
102                                              CAM_QCOM_FEATURE_CAC |\
103                                              CAM_QCOM_FEATURE_CDS )
104/* Per configuration size for static metadata length*/
105#define PER_CONFIGURATION_SIZE_3 (3)
106
107#define TIMEOUT_NEVER -1
108
// Per-sensor capability table, filled in once when each camera's capabilities
// are queried and shared by every session (indexed by camera id).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata blobs handed to the camera framework, one per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Protects the process-wide state below (session count, session ids).
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity; refreshed via getLogLevel().
volatile uint32_t gCamHal3LogLevel = 1;
// Number of currently open camera sessions across the process.
extern uint8_t gNumCameraSessions;
114
// Property-string to CDS mode lookup (used when parsing CDS settings from
// system properties).
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Framework ANDROID_CONTROL_EFFECT_MODE_* values to backend effect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Framework ANDROID_CONTROL_AWB_MODE_* values to backend white-balance modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
148
// Framework ANDROID_CONTROL_SCENE_MODE_* values to backend scene modes.
// Note: STEADYPHOTO intentionally maps to the backend ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Framework AF modes to backend focus modes. ANDROID_CONTROL_AF_MODE_OFF
// appears twice on purpose: when translating HAL -> Android, both
// CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED fold back to AF_MODE_OFF
// (lookup walks the table from low to high index and keeps the first match).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Framework chromatic-aberration correction modes to backend CAC modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Framework AE antibanding modes to backend antibanding modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
201
// Framework AE modes to the flash behavior they imply. Both AE OFF and plain
// AE ON leave the flash off; REDEYE maps to AUTO (no dedicated red-eye entry
// in the backend enum as used here — confirm if the backend grows one).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Framework manual flash modes to backend flash modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Framework face-detect modes to backend face-detect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Framework focus-distance calibration quality to backend calibration enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Framework lens-state enum to backend AF lens-state enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Supported JPEG thumbnail sizes as a flat list of (width, height) pairs.
// The leading (0, 0) entry advertises the mandatory "no thumbnail" option.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
254
// Framework sensor test-pattern modes to backend test-pattern modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested frame rate (fps) to backend high-frame-rate mode enum.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
303
// HAL3 entry points handed to the camera framework via mCameraDevice.ops.
// register_stream_buffers and get_metadata_vendor_tag_ops are left NULL
// (not used by this implementation).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Backend session ids per camera (filled by openCamera() through
// get_session_id); 0xDEADBEEF marks "not yet assigned".
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
318
319/*===========================================================================
320 * FUNCTION   : QCamera3HardwareInterface
321 *
322 * DESCRIPTION: constructor of QCamera3HardwareInterface
323 *
324 * PARAMETERS :
325 *   @cameraId  : camera ID
326 *
327 * RETURN     : none
328 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    // Pick up the configured log verbosity before anything else logs.
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Populate the hw_device_t shell returned to the framework from openCamera().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU's surface pixel alignment so buffer strides match what the
    // GPU expects; fall back to CAM_PAD_TO_32 when the library or symbol is
    // unavailable. The library is closed immediately since only the queried
    // value is retained.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }

    // Cache whether the device is in a 60Hz mains-frequency zone (antibanding).
    m60HzZone = is60HzZone();
}
448
449/*===========================================================================
450 * FUNCTION   : ~QCamera3HardwareInterface
451 *
452 * DESCRIPTION: destructor of QCamera3HardwareInterface
453 *
454 * PARAMETERS : none
455 *
456 * RETURN     : none
457 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    // First pass: stop every processing channel; deletion happens only after
    // every channel (and the backend channel handle) has been stopped.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: everything is stopped, now delete the channels and the
    // stream_info_t records that own them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo and was deleted above; only
    // clear the alias here.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // Tell the backend the stream configuration is gone by pushing an
            // empty (zeroed) stream info with only buffer counts filled in.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any bookkeeping for requests still in flight; the underlying
    // buffers belong to the framework.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
580
581/*===========================================================================
582 * FUNCTION   : erasePendingRequest
583 *
584 * DESCRIPTION: function to erase a desired pending request after freeing any
585 *              allocated memory
586 *
587 * PARAMETERS :
588 *   @i       : iterator pointing to pending request to be erased
589 *
590 * RETURN     : iterator pointing to the next request
591 *==========================================================================*/
592QCamera3HardwareInterface::pendingRequestIterator
593        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
594{
595    if (i->input_buffer != NULL) {
596        free(i->input_buffer);
597        i->input_buffer = NULL;
598    }
599    if (i->settings != NULL)
600        free_camera_metadata((camera_metadata_t*)i->settings);
601    return mPendingRequestsList.erase(i);
602}
603
604/*===========================================================================
605 * FUNCTION   : camEvtHandle
606 *
607 * DESCRIPTION: Function registered to mm-camera-interface to handle events
608 *
609 * PARAMETERS :
610 *   @camera_handle : interface layer camera handle
611 *   @evt           : ptr to event
612 *   @user_data     : user data ptr
613 *
614 * RETURN     : none
615 *==========================================================================*/
616void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
617                                          mm_camera_event_t *evt,
618                                          void *user_data)
619{
620    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
621    if (obj && evt) {
622        switch(evt->server_event_type) {
623            case CAM_EVENT_TYPE_DAEMON_DIED:
624                pthread_mutex_lock(&obj->mMutex);
625                obj->mState = ERROR;
626                pthread_mutex_unlock(&obj->mMutex);
627                LOGE("Fatal, camera daemon died");
628                break;
629
630            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
631                LOGD("HAL got request pull from Daemon");
632                pthread_mutex_lock(&obj->mMutex);
633                obj->mWokenUpByDaemon = true;
634                obj->unblockRequestIfNecessary();
635                pthread_mutex_unlock(&obj->mMutex);
636                break;
637
638            default:
639                LOGW("Warning: Unhandled event %d",
640                        evt->server_event_type);
641                break;
642        }
643    } else {
644        LOGE("NULL user_data/evt");
645    }
646}
647
648/*===========================================================================
649 * FUNCTION   : openCamera
650 *
651 * DESCRIPTION: open camera
652 *
653 * PARAMETERS :
654 *   @hw_device  : double ptr for camera device struct
655 *
656 * RETURN     : int32_t type of status
657 *              NO_ERROR  -- success
658 *              none-zero failure code
659 *==========================================================================*/
660int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
661{
662    int rc = 0;
663    if (mState != CLOSED) {
664        *hw_device = NULL;
665        return PERMISSION_DENIED;
666    }
667
668    m_perfLock.lock_acq();
669    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
670             mCameraId);
671
672    rc = openCamera();
673    if (rc == 0) {
674        *hw_device = &mCameraDevice.common;
675    } else
676        *hw_device = NULL;
677
678    m_perfLock.lock_rel();
679    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
680             mCameraId, rc);
681
682    if (rc == NO_ERROR) {
683        mState = OPENED;
684    }
685    return rc;
686}
687
688/*===========================================================================
689 * FUNCTION   : openCamera
690 *
691 * DESCRIPTION: open camera
692 *
693 * PARAMETERS : none
694 *
695 * RETURN     : int32_t type of status
696 *              NO_ERROR  -- success
697 *              none-zero failure code
698 *==========================================================================*/
699int QCamera3HardwareInterface::openCamera()
700{
701    int rc = 0;
702    char value[PROPERTY_VALUE_MAX];
703
704    KPI_ATRACE_CALL();
705    if (mCameraHandle) {
706        LOGE("Failure: Camera already opened");
707        return ALREADY_EXISTS;
708    }
709
710    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
711    if (rc < 0) {
712        LOGE("Failed to reserve flash for camera id: %d",
713                mCameraId);
714        return UNKNOWN_ERROR;
715    }
716
717    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
718    if (rc) {
719        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
720        return rc;
721    }
722
723    if (!mCameraHandle) {
724        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
725        return -ENODEV;
726    }
727
728    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
729            camEvtHandle, (void *)this);
730
731    if (rc < 0) {
732        LOGE("Error, failed to register event callback");
733        /* Not closing camera here since it is already handled in destructor */
734        return FAILED_TRANSACTION;
735    }
736
737    mExifParams.debug_params =
738            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
739    if (mExifParams.debug_params) {
740        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
741    } else {
742        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
743        return NO_MEMORY;
744    }
745    mFirstConfiguration = true;
746
747    //Notify display HAL that a camera session is active.
748    //But avoid calling the same during bootup because camera service might open/close
749    //cameras at boot time during its initialization and display service will also internally
750    //wait for camera service to initialize first while calling this display API, resulting in a
751    //deadlock situation. Since boot time camera open/close calls are made only to fetch
752    //capabilities, no need of this display bw optimization.
753    //Use "service.bootanim.exit" property to know boot status.
754    property_get("service.bootanim.exit", value, "0");
755    if (atoi(value) == 1) {
756        pthread_mutex_lock(&gCamLock);
757        if (gNumCameraSessions++ == 0) {
758            setCameraLaunchStatus(true);
759        }
760        pthread_mutex_unlock(&gCamLock);
761    }
762
763    //fill the session id needed while linking dual cam
764    pthread_mutex_lock(&gCamLock);
765    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
766        &sessionId[mCameraId]);
767    pthread_mutex_unlock(&gCamLock);
768
769    if (rc < 0) {
770        LOGE("Error, failed to get sessiion id");
771        return UNKNOWN_ERROR;
772    } else {
773        //Allocate related cam sync buffer
774        //this is needed for the payload that goes along with bundling cmd for related
775        //camera use cases
776        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
777        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
778        if(rc != OK) {
779            rc = NO_MEMORY;
780            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
781            return NO_MEMORY;
782        }
783
784        //Map memory for related cam sync buffer
785        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
786                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
787                m_pRelCamSyncHeap->getFd(0),
788                sizeof(cam_sync_related_sensors_event_info_t),
789                m_pRelCamSyncHeap->getPtr(0));
790        if(rc < 0) {
791            LOGE("Dualcam: failed to map Related cam sync buffer");
792            rc = FAILED_TRANSACTION;
793            return NO_MEMORY;
794        }
795        m_pRelCamSyncBuf =
796                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
797    }
798
799    LOGH("mCameraId=%d",mCameraId);
800
801    return NO_ERROR;
802}
803
804/*===========================================================================
805 * FUNCTION   : closeCamera
806 *
807 * DESCRIPTION: close camera
808 *
809 * PARAMETERS : none
810 *
811 * RETURN     : int32_t type of status
812 *              NO_ERROR  -- success
 *              non-zero failure code
814 *==========================================================================*/
815int QCamera3HardwareInterface::closeCamera()
816{
817    KPI_ATRACE_CALL();
818    int rc = NO_ERROR;
819    char value[PROPERTY_VALUE_MAX];
820
821    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
822             mCameraId);
823    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
824    mCameraHandle = NULL;
825
826    //reset session id to some invalid id
827    pthread_mutex_lock(&gCamLock);
828    sessionId[mCameraId] = 0xDEADBEEF;
829    pthread_mutex_unlock(&gCamLock);
830
831    //Notify display HAL that there is no active camera session
832    //but avoid calling the same during bootup. Refer to openCamera
833    //for more details.
834    property_get("service.bootanim.exit", value, "0");
835    if (atoi(value) == 1) {
836        pthread_mutex_lock(&gCamLock);
837        if (--gNumCameraSessions == 0) {
838            setCameraLaunchStatus(false);
839        }
840        pthread_mutex_unlock(&gCamLock);
841    }
842
843    if (NULL != m_pRelCamSyncHeap) {
844        m_pRelCamSyncHeap->deallocate();
845        delete m_pRelCamSyncHeap;
846        m_pRelCamSyncHeap = NULL;
847        m_pRelCamSyncBuf = NULL;
848    }
849
850    if (mExifParams.debug_params) {
851        free(mExifParams.debug_params);
852        mExifParams.debug_params = NULL;
853    }
854    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
855        LOGW("Failed to release flash for camera id: %d",
856                mCameraId);
857    }
858    mState = CLOSED;
859    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
860         mCameraId, rc);
861    return rc;
862}
863
864/*===========================================================================
865 * FUNCTION   : initialize
866 *
867 * DESCRIPTION: Initialize frameworks callback functions
868 *
869 * PARAMETERS :
870 *   @callback_ops : callback function to frameworks
871 *
872 * RETURN     :
873 *
874 *==========================================================================*/
875int QCamera3HardwareInterface::initialize(
876        const struct camera3_callback_ops *callback_ops)
877{
878    ATRACE_CALL();
879    int rc;
880
881    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
882    pthread_mutex_lock(&mMutex);
883
884    // Validate current state
885    switch (mState) {
886        case OPENED:
887            /* valid state */
888            break;
889        default:
890            LOGE("Invalid state %d", mState);
891            rc = -ENODEV;
892            goto err1;
893    }
894
895    rc = initParameters();
896    if (rc < 0) {
897        LOGE("initParamters failed %d", rc);
898        goto err1;
899    }
900    mCallbackOps = callback_ops;
901
902    mChannelHandle = mCameraHandle->ops->add_channel(
903            mCameraHandle->camera_handle, NULL, NULL, this);
904    if (mChannelHandle == 0) {
905        LOGE("add_channel failed");
906        rc = -ENOMEM;
907        pthread_mutex_unlock(&mMutex);
908        return rc;
909    }
910
911    pthread_mutex_unlock(&mMutex);
912    mCameraInitialized = true;
913    mState = INITIALIZED;
914    LOGI("X");
915    return 0;
916
917err1:
918    pthread_mutex_unlock(&mMutex);
919    return rc;
920}
921
922/*===========================================================================
923 * FUNCTION   : validateStreamDimensions
924 *
925 * DESCRIPTION: Check if the configuration requested are those advertised
926 *
927 * PARAMETERS :
928 *   @stream_list : streams to be configured
929 *
930 * RETURN     :
931 *
932 *==========================================================================*/
933int QCamera3HardwareInterface::validateStreamDimensions(
934        camera3_stream_configuration_t *streamList)
935{
936    int rc = NO_ERROR;
937    size_t count = 0;
938
939    camera3_stream_t *inputStream = NULL;
940    /*
941    * Loop through all streams to find input stream if it exists*
942    */
943    for (size_t i = 0; i< streamList->num_streams; i++) {
944        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
945            if (inputStream != NULL) {
946                LOGE("Error, Multiple input streams requested");
947                return -EINVAL;
948            }
949            inputStream = streamList->streams[i];
950        }
951    }
952    /*
953    * Loop through all streams requested in configuration
954    * Check if unsupported sizes have been requested on any of them
955    */
956    for (size_t j = 0; j < streamList->num_streams; j++) {
957        bool sizeFound = false;
958        camera3_stream_t *newStream = streamList->streams[j];
959
960        uint32_t rotatedHeight = newStream->height;
961        uint32_t rotatedWidth = newStream->width;
962        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
963                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
964            rotatedHeight = newStream->width;
965            rotatedWidth = newStream->height;
966        }
967
968        /*
969        * Sizes are different for each type of stream format check against
970        * appropriate table.
971        */
972        switch (newStream->format) {
973        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
974        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
975        case HAL_PIXEL_FORMAT_RAW10:
976            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
977            for (size_t i = 0; i < count; i++) {
978                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
979                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
980                    sizeFound = true;
981                    break;
982                }
983            }
984            break;
985        case HAL_PIXEL_FORMAT_BLOB:
986            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
987            /* Verify set size against generated sizes table */
988            for (size_t i = 0; i < count; i++) {
989                if (((int32_t)rotatedWidth ==
990                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
991                        ((int32_t)rotatedHeight ==
992                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
993                    sizeFound = true;
994                    break;
995                }
996            }
997            break;
998        case HAL_PIXEL_FORMAT_YCbCr_420_888:
999        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1000        default:
1001            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1002                    || newStream->stream_type == CAMERA3_STREAM_INPUT
1003                    || IS_USAGE_ZSL(newStream->usage)) {
1004                if (((int32_t)rotatedWidth ==
1005                                gCamCapability[mCameraId]->active_array_size.width) &&
1006                                ((int32_t)rotatedHeight ==
1007                                gCamCapability[mCameraId]->active_array_size.height)) {
1008                    sizeFound = true;
1009                    break;
1010                }
1011                /* We could potentially break here to enforce ZSL stream
1012                 * set from frameworks always is full active array size
1013                 * but it is not clear from the spc if framework will always
1014                 * follow that, also we have logic to override to full array
1015                 * size, so keeping the logic lenient at the moment
1016                 */
1017            }
1018            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1019                    MAX_SIZES_CNT);
1020            for (size_t i = 0; i < count; i++) {
1021                if (((int32_t)rotatedWidth ==
1022                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1023                            ((int32_t)rotatedHeight ==
1024                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1025                    sizeFound = true;
1026                    break;
1027                }
1028            }
1029            break;
1030        } /* End of switch(newStream->format) */
1031
1032        /* We error out even if a single stream has unsupported size set */
1033        if (!sizeFound) {
1034            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1035                    rotatedWidth, rotatedHeight, newStream->format,
1036                    gCamCapability[mCameraId]->active_array_size.width,
1037                    gCamCapability[mCameraId]->active_array_size.height);
1038            rc = -EINVAL;
1039            break;
1040        }
1041    } /* End of for each stream */
1042    return rc;
1043}
1044
1045/*==============================================================================
1046 * FUNCTION   : isSupportChannelNeeded
1047 *
1048 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1049 *
1050 * PARAMETERS :
1051 *   @stream_list : streams to be configured
1052 *   @stream_config_info : the config info for streams to be configured
1053 *
 * RETURN     : Boolean true/false decision
1055 *
1056 *==========================================================================*/
1057bool QCamera3HardwareInterface::isSupportChannelNeeded(
1058        camera3_stream_configuration_t *streamList,
1059        cam_stream_size_info_t stream_config_info)
1060{
1061    uint32_t i;
1062    bool pprocRequested = false;
1063    /* Check for conditions where PProc pipeline does not have any streams*/
1064    for (i = 0; i < stream_config_info.num_streams; i++) {
1065        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1066                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1067            pprocRequested = true;
1068            break;
1069        }
1070    }
1071
1072    if (pprocRequested == false )
1073        return true;
1074
1075    /* Dummy stream needed if only raw or jpeg streams present */
1076    for (i = 0; i < streamList->num_streams; i++) {
1077        switch(streamList->streams[i]->format) {
1078            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1079            case HAL_PIXEL_FORMAT_RAW10:
1080            case HAL_PIXEL_FORMAT_RAW16:
1081            case HAL_PIXEL_FORMAT_BLOB:
1082                break;
1083            default:
1084                return false;
1085        }
1086    }
1087    return true;
1088}
1089
1090/*==============================================================================
1091 * FUNCTION   : getSensorOutputSize
1092 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1094 *
1095 * PARAMETERS :
1096 *   @sensor_dim : sensor output dimension (output)
1097 *
1098 * RETURN     : int32_t type of status
1099 *              NO_ERROR  -- success
 *              non-zero failure code
1101 *
1102 *==========================================================================*/
1103int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1104{
1105    int32_t rc = NO_ERROR;
1106
1107    cam_dimension_t max_dim = {0, 0};
1108    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1109        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1110            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1111        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1112            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1113    }
1114
1115    clear_metadata_buffer(mParameters);
1116
1117    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1118            max_dim);
1119    if (rc != NO_ERROR) {
1120        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1121        return rc;
1122    }
1123
1124    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1125    if (rc != NO_ERROR) {
1126        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1127        return rc;
1128    }
1129
1130    clear_metadata_buffer(mParameters);
1131    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1132
1133    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1134            mParameters);
1135    if (rc != NO_ERROR) {
1136        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1137        return rc;
1138    }
1139
1140    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1141    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1142
1143    return rc;
1144}
1145
1146/*==============================================================================
1147 * FUNCTION   : enablePowerHint
1148 *
1149 * DESCRIPTION: enable single powerhint for preview and different video modes.
1150 *
1151 * PARAMETERS :
1152 *
1153 * RETURN     : NULL
1154 *
1155 *==========================================================================*/
1156void QCamera3HardwareInterface::enablePowerHint()
1157{
1158    if (!mPowerHintEnabled) {
1159        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1160        mPowerHintEnabled = true;
1161    }
1162}
1163
1164/*==============================================================================
1165 * FUNCTION   : disablePowerHint
1166 *
1167 * DESCRIPTION: disable current powerhint.
1168 *
1169 * PARAMETERS :
1170 *
1171 * RETURN     : NULL
1172 *
1173 *==========================================================================*/
1174void QCamera3HardwareInterface::disablePowerHint()
1175{
1176    if (mPowerHintEnabled) {
1177        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1178        mPowerHintEnabled = false;
1179    }
1180}
1181
1182/*==============================================================================
1183 * FUNCTION   : addToPPFeatureMask
1184 *
1185 * DESCRIPTION: add additional features to pp feature mask based on
1186 *              stream type and usecase
1187 *
1188 * PARAMETERS :
1189 *   @stream_format : stream type for feature mask
1190 *   @stream_idx : stream idx within postprocess_mask list to change
1191 *
1192 * RETURN     : NULL
1193 *
1194 *==========================================================================*/
1195void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1196        uint32_t stream_idx)
1197{
1198    char feature_mask_value[PROPERTY_VALUE_MAX];
1199    cam_feature_mask_t feature_mask;
1200    int args_converted;
1201    int property_len;
1202
1203    /* Get feature mask from property */
1204    property_len = property_get("persist.camera.hal3.feature",
1205            feature_mask_value, "0");
1206    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1207            (feature_mask_value[1] == 'x')) {
1208        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1209    } else {
1210        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1211    }
1212    if (1 != args_converted) {
1213        feature_mask = 0;
1214        LOGE("Wrong feature mask %s", feature_mask_value);
1215        return;
1216    }
1217
1218    switch (stream_format) {
1219    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1220        /* Add LLVD to pp feature mask only if video hint is enabled */
1221        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1222            mStreamConfigInfo.postprocess_mask[stream_idx]
1223                    |= CAM_QTI_FEATURE_SW_TNR;
1224            LOGH("Added SW TNR to pp feature mask");
1225        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1226            mStreamConfigInfo.postprocess_mask[stream_idx]
1227                    |= CAM_QCOM_FEATURE_LLVD;
1228            LOGH("Added LLVD SeeMore to pp feature mask");
1229        }
1230        break;
1231    }
1232    default:
1233        break;
1234    }
1235    LOGD("PP feature mask %llx",
1236            mStreamConfigInfo.postprocess_mask[stream_idx]);
1237}
1238
1239/*==============================================================================
1240 * FUNCTION   : updateFpsInPreviewBuffer
1241 *
1242 * DESCRIPTION: update FPS information in preview buffer.
1243 *
1244 * PARAMETERS :
1245 *   @metadata    : pointer to metadata buffer
1246 *   @frame_number: frame_number to look for in pending buffer list
1247 *
1248 * RETURN     : None
1249 *
1250 *==========================================================================*/
1251void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1252        uint32_t frame_number)
1253{
1254    // Mark all pending buffers for this particular request
1255    // with corresponding framerate information
1256    for (List<PendingBuffersInRequest>::iterator req =
1257            mPendingBuffersMap.mPendingBuffersInRequest.begin();
1258            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1259        for(List<PendingBufferInfo>::iterator j =
1260                req->mPendingBufferList.begin();
1261                j != req->mPendingBufferList.end(); j++) {
1262            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1263            if ((req->frame_number == frame_number) &&
1264                (channel->getStreamTypeMask() &
1265                (1U << CAM_STREAM_TYPE_PREVIEW))) {
1266                IF_META_AVAILABLE(cam_fps_range_t, float_range,
1267                    CAM_INTF_PARM_FPS_RANGE, metadata) {
1268                    int32_t cameraFps = float_range->max_fps;
1269                    struct private_handle_t *priv_handle =
1270                        (struct private_handle_t *)(*(j->buffer));
1271                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1272                }
1273            }
1274        }
1275    }
1276}
1277
1278/*==============================================================================
1279 * FUNCTION   : updateTimeStampInPendingBuffers
1280 *
1281 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1282 *              of a frame number
1283 *
1284 * PARAMETERS :
1285 *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1286 *   @timestamp   : timestamp to be set
1287 *
1288 * RETURN     : None
1289 *
1290 *==========================================================================*/
1291void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1292        uint32_t frameNumber, nsecs_t timestamp)
1293{
1294    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1295            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1296        if (req->frame_number != frameNumber)
1297            continue;
1298
1299        for (auto k = req->mPendingBufferList.begin();
1300                k != req->mPendingBufferList.end(); k++ ) {
1301            struct private_handle_t *priv_handle =
1302                    (struct private_handle_t *) (*(k->buffer));
1303            setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1304        }
1305    }
1306    return;
1307}
1308
1309/*===========================================================================
1310 * FUNCTION   : configureStreams
1311 *
1312 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1313 *              and output streams.
1314 *
1315 * PARAMETERS :
1316 *   @stream_list : streams to be configured
1317 *
1318 * RETURN     :
1319 *
1320 *==========================================================================*/
1321int QCamera3HardwareInterface::configureStreams(
1322        camera3_stream_configuration_t *streamList)
1323{
1324    ATRACE_CALL();
1325    int rc = 0;
1326
1327    // Acquire perfLock before configure streams
1328    m_perfLock.lock_acq();
1329    rc = configureStreamsPerfLocked(streamList);
1330    m_perfLock.lock_rel();
1331
1332    return rc;
1333}
1334
1335/*===========================================================================
1336 * FUNCTION   : configureStreamsPerfLocked
1337 *
1338 * DESCRIPTION: configureStreams while perfLock is held.
1339 *
1340 * PARAMETERS :
1341 *   @stream_list : streams to be configured
1342 *
1343 * RETURN     : int32_t type of status
1344 *              NO_ERROR  -- success
 *              non-zero failure code
1346 *==========================================================================*/
1347int QCamera3HardwareInterface::configureStreamsPerfLocked(
1348        camera3_stream_configuration_t *streamList)
1349{
1350    ATRACE_CALL();
1351    int rc = 0;
1352
1353    // Sanity check stream_list
1354    if (streamList == NULL) {
1355        LOGE("NULL stream configuration");
1356        return BAD_VALUE;
1357    }
1358    if (streamList->streams == NULL) {
1359        LOGE("NULL stream list");
1360        return BAD_VALUE;
1361    }
1362
1363    if (streamList->num_streams < 1) {
1364        LOGE("Bad number of streams requested: %d",
1365                streamList->num_streams);
1366        return BAD_VALUE;
1367    }
1368
1369    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1370        LOGE("Maximum number of streams %d exceeded: %d",
1371                MAX_NUM_STREAMS, streamList->num_streams);
1372        return BAD_VALUE;
1373    }
1374
1375    mOpMode = streamList->operation_mode;
1376    LOGD("mOpMode: %d", mOpMode);
1377
    /* first invalidate all the streams in the mStreamList
1379     * if they appear again, they will be validated */
1380    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1381            it != mStreamInfo.end(); it++) {
1382        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1383        if (channel) {
1384          channel->stop();
1385        }
1386        (*it)->status = INVALID;
1387    }
1388
1389    if (mRawDumpChannel) {
1390        mRawDumpChannel->stop();
1391        delete mRawDumpChannel;
1392        mRawDumpChannel = NULL;
1393    }
1394
1395    if (mSupportChannel)
1396        mSupportChannel->stop();
1397
1398    if (mAnalysisChannel) {
1399        mAnalysisChannel->stop();
1400    }
1401    if (mMetadataChannel) {
1402        /* If content of mStreamInfo is not 0, there is metadata stream */
1403        mMetadataChannel->stop();
1404    }
1405    if (mChannelHandle) {
1406        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1407                mChannelHandle);
1408        LOGD("stopping channel %d", mChannelHandle);
1409    }
1410
1411    pthread_mutex_lock(&mMutex);
1412
1413    // Check state
1414    switch (mState) {
1415        case INITIALIZED:
1416        case CONFIGURED:
1417        case STARTED:
1418            /* valid state */
1419            break;
1420        default:
1421            LOGE("Invalid state %d", mState);
1422            pthread_mutex_unlock(&mMutex);
1423            return -ENODEV;
1424    }
1425
1426    /* Check whether we have video stream */
1427    m_bIs4KVideo = false;
1428    m_bIsVideo = false;
1429    m_bEisSupportedSize = false;
1430    m_bTnrEnabled = false;
1431    bool isZsl = false;
1432    uint32_t videoWidth = 0U;
1433    uint32_t videoHeight = 0U;
1434    size_t rawStreamCnt = 0;
1435    size_t stallStreamCnt = 0;
1436    size_t processedStreamCnt = 0;
1437    // Number of streams on ISP encoder path
1438    size_t numStreamsOnEncoder = 0;
1439    size_t numYuv888OnEncoder = 0;
1440    bool bYuv888OverrideJpeg = false;
1441    cam_dimension_t largeYuv888Size = {0, 0};
1442    cam_dimension_t maxViewfinderSize = {0, 0};
1443    bool bJpegExceeds4K = false;
1444    bool bJpegOnEncoder = false;
1445    bool bUseCommonFeatureMask = false;
1446    cam_feature_mask_t commonFeatureMask = 0;
1447    bool bSmallJpegSize = false;
1448    uint32_t width_ratio;
1449    uint32_t height_ratio;
1450    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1451    camera3_stream_t *inputStream = NULL;
1452    bool isJpeg = false;
1453    cam_dimension_t jpegSize = {0, 0};
1454
1455    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1456
1457    /*EIS configuration*/
1458    bool eisSupported = false;
1459    bool oisSupported = false;
1460    int32_t margin_index = -1;
1461    uint8_t eis_prop_set;
1462    uint32_t maxEisWidth = 0;
1463    uint32_t maxEisHeight = 0;
1464
1465    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1466
1467    size_t count = IS_TYPE_MAX;
1468    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1469    for (size_t i = 0; i < count; i++) {
1470        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1471            eisSupported = true;
1472            margin_index = (int32_t)i;
1473            break;
1474        }
1475    }
1476
1477    count = CAM_OPT_STAB_MAX;
1478    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1479    for (size_t i = 0; i < count; i++) {
1480        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1481            oisSupported = true;
1482            break;
1483        }
1484    }
1485
1486    if (eisSupported) {
1487        maxEisWidth = MAX_EIS_WIDTH;
1488        maxEisHeight = MAX_EIS_HEIGHT;
1489    }
1490
1491    /* EIS setprop control */
1492    char eis_prop[PROPERTY_VALUE_MAX];
1493    memset(eis_prop, 0, sizeof(eis_prop));
1494    property_get("persist.camera.eis.enable", eis_prop, "0");
1495    eis_prop_set = (uint8_t)atoi(eis_prop);
1496
1497    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1498            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1499
1500    /* stream configurations */
1501    for (size_t i = 0; i < streamList->num_streams; i++) {
1502        camera3_stream_t *newStream = streamList->streams[i];
1503        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1504                "height = %d, rotation = %d, usage = 0x%x",
1505                 i, newStream->stream_type, newStream->format,
1506                newStream->width, newStream->height, newStream->rotation,
1507                newStream->usage);
1508        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1509                newStream->stream_type == CAMERA3_STREAM_INPUT){
1510            isZsl = true;
1511        }
1512        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1513            inputStream = newStream;
1514        }
1515
1516        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1517            isJpeg = true;
1518            jpegSize.width = newStream->width;
1519            jpegSize.height = newStream->height;
1520            if (newStream->width > VIDEO_4K_WIDTH ||
1521                    newStream->height > VIDEO_4K_HEIGHT)
1522                bJpegExceeds4K = true;
1523        }
1524
1525        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1526                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1527            m_bIsVideo = true;
1528            videoWidth = newStream->width;
1529            videoHeight = newStream->height;
1530            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1531                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1532                m_bIs4KVideo = true;
1533            }
1534            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1535                                  (newStream->height <= maxEisHeight);
1536        }
1537        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1538                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1539            switch (newStream->format) {
1540            case HAL_PIXEL_FORMAT_BLOB:
1541                stallStreamCnt++;
1542                if (isOnEncoder(maxViewfinderSize, newStream->width,
1543                        newStream->height)) {
1544                    numStreamsOnEncoder++;
1545                    bJpegOnEncoder = true;
1546                }
1547                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1548                        newStream->width);
1549                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1550                        newStream->height);;
1551                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1552                        "FATAL: max_downscale_factor cannot be zero and so assert");
1553                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1554                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1555                    LOGH("Setting small jpeg size flag to true");
1556                    bSmallJpegSize = true;
1557                }
1558                break;
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
                // RAW outputs only contribute to the raw-stream count, which is
                // validated against MAX_RAW_STREAMS below; no PP features added.
                rawStreamCnt++;
                break;
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                processedStreamCnt++;
                if (isOnEncoder(maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    // ZSL/bidirectional streams do not add to the shared
                    // feature mask; other encoder-path streams get the HAL3
                    // post-processing superset.
                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
                            !IS_USAGE_ZSL(newStream->usage)) {
                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    numStreamsOnEncoder++;
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                processedStreamCnt++;
                if (isOnEncoder(maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    // If Yuv888 size is not greater than 4K, set feature mask
                    // to SUPERSET so that it support concurrent request on
                    // YUV and JPEG.
                    if (newStream->width <= VIDEO_4K_WIDTH &&
                            newStream->height <= VIDEO_4K_HEIGHT) {
                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    numStreamsOnEncoder++;
                    numYuv888OnEncoder++;
                    // Remember the (last seen) large YUV888 size; used further
                    // down to decide whether YUV overrides the JPEG path.
                    largeYuv888Size.width = newStream->width;
                    largeYuv888Size.height = newStream->height;
                }
                break;
            default:
                // Any other format is treated as a processed stream on the
                // encoder path with the full PP superset.
                processedStreamCnt++;
                if (isOnEncoder(maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    numStreamsOnEncoder++;
                }
                break;
            }

        }
    }
1604
    // Disable EIS for the front camera or when no video stream is configured.
    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
        !m_bIsVideo) {
        m_bEisEnable = false;
    }

    /* Logic to enable/disable TNR based on specific config size/etc.*/
    // TNR is turned on only for 1080p or 720p video and only outside of
    // constrained high-speed (HFR) mode.
    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
            ((videoWidth == 1920 && videoHeight == 1080) ||
            (videoWidth == 1280 && videoHeight == 720)) &&
            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
        m_bTnrEnabled = true;
1616
1617    /* Check if num_streams is sane */
1618    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1619            rawStreamCnt > MAX_RAW_STREAMS ||
1620            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1621        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1622                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1623        pthread_mutex_unlock(&mMutex);
1624        return -EINVAL;
1625    }
    /* Check whether we have zsl stream or 4k video case */
    if (isZsl && m_bIsVideo) {
        LOGE("Currently invalid configuration ZSL&Video!");
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Check if stream sizes are sane */
    if (numStreamsOnEncoder > 2) {
        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    } else if (1 < numStreamsOnEncoder){
        // More than one stream shares the encoder path: all of them must use
        // one common post-processing feature mask (computed above).
        bUseCommonFeatureMask = true;
        LOGH("Multiple streams above max viewfinder size, common mask needed");
    }

    /* Check if BLOB size is greater than 4k in 4k recording case */
    if (m_bIs4KVideo && bJpegExceeds4K) {
        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
1648
1649    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1650    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1651    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1652    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1653    // configurations:
1654    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1655    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1656    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1657    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1658        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1659                __func__);
1660        pthread_mutex_unlock(&mMutex);
1661        return -EINVAL;
1662    }
1663
    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
    // the YUV stream's size is greater or equal to the JPEG size, set common
    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
    // NOTE(review): the comment says "greater or equal" but the checks below use
    // strictly-greater (>) — confirm which is intended.
    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
            jpegSize.width, jpegSize.height) &&
            largeYuv888Size.width > jpegSize.width &&
            largeYuv888Size.height > jpegSize.height) {
        bYuv888OverrideJpeg = true;
    } else if (!isJpeg && numStreamsOnEncoder > 1) {
        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    }

    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
            commonFeatureMask);
    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);

    // Validate per-stream dimensions and rotations up front; bail out (with the
    // mutex released) before any channels are destroyed or created.
    rc = validateStreamDimensions(streamList);
    if (rc == NO_ERROR) {
        rc = validateStreamRotations(streamList);
    }
    if (rc != NO_ERROR) {
        LOGE("Invalid stream configuration requested!");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
1691
    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
    camera3_stream_t *jpegStream = NULL;
    // Reconcile the incoming stream list with mStreamInfo: channels attached to
    // re-used streams are destroyed here (they are re-created further below),
    // and brand-new output streams get a fresh stream_info_t entry.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        LOGH("newStream type = %d, stream format = %d "
                "stream size : %d x %d, stream rotation = %d",
                 newStream->stream_type, newStream->format,
                newStream->width, newStream->height, newStream->rotation);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                // NOTE(review): priv is deleted through a
                // QCamera3ProcessingChannel* — assumes the channel hierarchy
                // has a virtual destructor; confirm in QCamera3Channel.h.
                QCamera3ProcessingChannel *channel =
                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
                stream_exists = true;
                if (channel)
                    delete channel;
                (*it)->status = VALID;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
            //new stream
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            if (!stream_info) {
               LOGE("Could not allocate stream info");
               rc = -ENOMEM;
               pthread_mutex_unlock(&mMutex);
               return rc;
            }
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        /* Covers Opaque ZSL and API1 F/W ZSL */
        if (IS_USAGE_ZSL(newStream->usage)
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            // Only one input/reprocess-capable stream is allowed per config.
            if (zslStream != NULL) {
                LOGE("Multiple input/reprocess streams requested!");
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            zslStream = newStream;
        }
        /* Covers YUV reprocess */
        if (inputStream != NULL) {
            // A YUV output whose format and size match the input stream is
            // treated as the ZSL (reprocess) stream for sizing purposes.
            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
                    && inputStream->width == newStream->width
                    && inputStream->height == newStream->height) {
                if (zslStream != NULL) {
                    /* This scenario indicates multiple YUV streams with same size
                     * as input stream have been requested, since zsl stream handle
                     * is solely use for the purpose of overriding the size of streams
                     * which share h/w streams we will just make a guess here as to
                     * which of the stream is a ZSL stream, this will be refactored
                     * once we make generic logic for streams sharing encoder output
                     */
                    LOGH("Warning, Multiple ip/reprocess streams requested!");
                }
                zslStream = newStream;
            }
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
1764
    /* If a zsl stream is set, we know that we have configured at least one input or
       bidirectional stream */
    if (NULL != zslStream) {
        // Record the input/reprocess stream geometry for later reprocess setup.
        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
        mInputStreamInfo.format = zslStream->format;
        mInputStreamInfo.usage = zslStream->usage;
        LOGD("Input stream configured! %d x %d, format %d, usage %d",
                 mInputStreamInfo.dim.width,
                mInputStreamInfo.dim.height,
                mInputStreamInfo.format, mInputStreamInfo.usage);
    }

    // Drop internal (HAL-owned) channels from the previous configuration; they
    // are re-created below as needed for the new stream list.
    cleanAndSortStreamInfo();
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }

    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
1797
    //Create metadata channel and initialize it
    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
            gCamCapability[mCameraId]->color_arrangement);
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mChannelHandle, mCameraHandle->ops, captureResultCb,
                    setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
    // NOTE(review): the NULL check is only meaningful if this is built with
    // -fno-exceptions (where operator new can return NULL) — standard new throws.
    if (mMetadataChannel == NULL) {
        LOGE("failed to allocate metadata channel");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
    if (rc < 0) {
        // Initialization failure: free the channel before bailing out.
        LOGE("metadata channel initialization failed");
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
1819
    // Create analysis stream all the time, even when h/w support is not available
    {
        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        cam_analysis_info_t analysisInfo;
        rc = mCommon.getAnalysisInfo(
                FALSE,
                TRUE,
                analysisFeatureMask,
                &analysisInfo);
        if (rc != NO_ERROR) {
            LOGE("getAnalysisInfo failed, ret = %d", rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }

        // Y-only analysis formats use the Y filter arrangement; otherwise fall
        // back to the sensor's color arrangement.
        cam_color_filter_arrangement_t analysis_color_arrangement =
                (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
                CAM_FILTER_ARRANGEMENT_Y :
                gCamCapability[mCameraId]->color_arrangement);
        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
                analysis_color_arrangement);

        mAnalysisChannel = new QCamera3SupportChannel(
                mCameraHandle->camera_handle,
                mChannelHandle,
                mCameraHandle->ops,
                &analysisInfo.analysis_padding_info,
                analysisFeatureMask,
                CAM_STREAM_TYPE_ANALYSIS,
                &analysisInfo.analysis_max_res,
                (analysisInfo.analysis_format
                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
                : CAM_FORMAT_YUV_420_NV21),
                analysisInfo.hw_analysis_supported,
                this,
                0); // force buffer count to 0
        if (!mAnalysisChannel) {
            LOGE("H/W Analysis channel cannot be created");
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }
1862
    bool isRawStreamRequested = false;
    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
        // Opaque-ZSL/bidirectional implementation-defined streams map to the
        // SNAPSHOT stream type.
        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                || IS_USAGE_ZSL(newStream->usage)) &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
            if (bUseCommonFeatureMask) {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        commonFeatureMask;
            } else {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        CAM_QCOM_FEATURE_NONE;
            }

        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
                LOGH("Input stream configured, reprocess config");
        } else {
            //for non zsl streams find out the format
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
            {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                /* add additional features to pp feature mask */
                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
                        mStreamConfigInfo.num_streams);

                // Video-encoder usage selects the VIDEO stream type; anything
                // else implementation-defined is treated as PREVIEW.
                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                                CAM_STREAM_TYPE_VIDEO;
                    if (m_bTnrEnabled && m_bTnrVideo) {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
                            CAM_QCOM_FEATURE_CPP_TNR;
                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
                                ~CAM_QCOM_FEATURE_CDS;
                    }
                } else {
                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                            CAM_STREAM_TYPE_PREVIEW;
                    if (m_bTnrEnabled && m_bTnrPreview) {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
                                CAM_QCOM_FEATURE_CPP_TNR;
                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
                                ~CAM_QCOM_FEATURE_CDS;
                    }
                    // Preview buffers come from the display; apply surface
                    // stride/height padding requirements.
                    padding_info.width_padding = mSurfaceStridePadding;
                    padding_info.height_padding = CAM_PAD_TO_2;
                }
                // For 90/270-degree rotation the backend sees swapped
                // width/height.
                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                            newStream->height;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                            newStream->width;
                }
            }
            break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
                // Encoder-path YUV streams share the common mask (or none);
                // off-encoder YUV gets the full PP superset.
                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
                    if (bUseCommonFeatureMask)
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                                commonFeatureMask;
                    else
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                                CAM_QCOM_FEATURE_NONE;
                } else {
                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                }
            break;
            case HAL_PIXEL_FORMAT_BLOB:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                } else {
                    if (bUseCommonFeatureMask &&
                            isOnEncoder(maxViewfinderSize, newStream->width,
                            newStream->height)) {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
                    } else {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                    }
                }
                // The JPEG source buffer size may differ from the BLOB stream
                // size: ZSL uses the ZSL stream size, 4K video uses the video
                // size, and a large YUV888 stream can override the JPEG size.
                if (isZsl) {
                    if (zslStream) {
                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                                (int32_t)zslStream->width;
                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                                (int32_t)zslStream->height;
                    } else {
                        LOGE("Error, No ZSL stream identified");
                        pthread_mutex_unlock(&mMutex);
                        return -EINVAL;
                    }
                } else if (m_bIs4KVideo) {
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
                } else if (bYuv888OverrideJpeg) {
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                            (int32_t)largeYuv888Size.width;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                            (int32_t)largeYuv888Size.height;
                }
                break;
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_RAW10:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                // Remember that RAW was requested so the RAW-dump channel is
                // not created redundantly below.
                isRawStreamRequested = true;
                break;
            default:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                break;
            }
        }
1991
        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                gCamCapability[mCameraId]->color_arrangement);

        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Set gralloc usage flags appropriate for each stream type before
            // the consumer allocates buffers.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage |=
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else if (IS_USAGE_ZSL(newStream->usage))
                {
                    // Usage already carries the ZSL flags; nothing to add.
                    LOGD("ZSL usage flag skipping");
                }
                else if (newStream == zslStream
                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
                } else
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                LOGE("Invalid stream_type %d", newStream->stream_type);
                break;
            }
2029
            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3ProcessingChannel *channel = NULL;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                    // HFR video gets a dedicated regular channel with zero heap
                    // buffers; all other implementation-defined streams take
                    // the normal path below.
                    if ((newStream->usage &
                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
                            (streamList->operation_mode ==
                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
                    ) {
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                0); //heap buffers are not required for HFR video channel
                        if (channel == NULL) {
                            LOGE("allocation of channel failed");
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        //channel->getNumBuffers() will return 0 here so use
                        //MAX_INFLIGHT_HFR_REQUESTS
                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
                        newStream->priv = channel;
                        LOGI("num video buffers in HFR mode: %d",
                                 MAX_INFLIGHT_HFR_REQUESTS);
                    } else {
                        /* Copy stream contents in HFR preview only case to create
                         * dummy batch channel so that sensor streaming is in
                         * HFR mode */
                        if (!m_bIsVideo && (streamList->operation_mode ==
                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
                            mDummyBatchStream = *newStream;
                        }
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                MAX_INFLIGHT_REQUESTS);
                        if (channel == NULL) {
                            LOGE("allocation of channel failed");
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
                        newStream->priv = channel;
                    }
                    break;
                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
                            mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info,
                            this,
                            newStream,
                            (cam_stream_type_t)
                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            mMetadataChannel);
                    if (channel == NULL) {
                        LOGE("allocation of YUV channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = channel->getNumBuffers();
                    newStream->priv = channel;
                    break;
                }
                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                case HAL_PIXEL_FORMAT_RAW16:
                case HAL_PIXEL_FORMAT_RAW10:
                    // Last flag tells the raw channel whether to dump as RAW16.
                    mRawChannel = new QCamera3RawChannel(
                            mCameraHandle->camera_handle, mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info,
                            this, newStream,
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            mMetadataChannel,
                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                    if (mRawChannel == NULL) {
                        LOGE("allocation of raw channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = mRawChannel->getNumBuffers();
                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    // Max live snapshot inflight buffer is 1. This is to mitigate
                    // frame drop issues for video snapshot. The more buffers being
                    // allocated, the more frame drops there are.
                    mPictureChannel = new QCamera3PicChannel(
                            mCameraHandle->camera_handle, mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info, this, newStream,
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            m_bIs4KVideo, isZsl, mMetadataChannel,
                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
                    if (mPictureChannel == NULL) {
                        LOGE("allocation of channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
                    newStream->max_buffers = mPictureChannel->getNumBuffers();
                    // Apply the (possibly overridden) JPEG source size computed
                    // in the stream-config pass above.
                    mPictureChannel->overrideYuvSize(
                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
                    break;

                default:
                    LOGE("not a supported format 0x%x", newStream->format);
                    break;
                }
            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
            } else {
                LOGE("Error, Unknown stream type");
                pthread_mutex_unlock(&mMutex);
                return -EINVAL;
            }

            // If the channel's default format for this stream is UBWC, add the
            // private UBWC allocation usage flag for the gralloc consumer.
            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
            if (channel != NULL && channel->isUBWCEnabled()) {
                cam_format_t fmt = channel->getStreamDefaultFormat(
                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                        newStream->width, newStream->height);
                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
                }
            }

            // Link the newly created channel back into the matching
            // stream_info_t entry.
            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
        // Restore default padding (it may have been changed for preview above).
        padding_info = gCamCapability[mCameraId]->padding_info;

        /* Do not add entries for input stream in metastream info
         * since there is no real stream associated with it
         */
        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
            mStreamConfigInfo.num_streams++;
    }
2192    //RAW DUMP channel
2193    if (mEnableRawDump && isRawStreamRequested == false){
2194        cam_dimension_t rawDumpSize;
2195        rawDumpSize = getMaxRawSize(mCameraId);
2196        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2197        setPAAFSupport(rawDumpFeatureMask,
2198                CAM_STREAM_TYPE_RAW,
2199                gCamCapability[mCameraId]->color_arrangement);
2200        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2201                                  mChannelHandle,
2202                                  mCameraHandle->ops,
2203                                  rawDumpSize,
2204                                  &padding_info,
2205                                  this, rawDumpFeatureMask);
2206        if (!mRawDumpChannel) {
2207            LOGE("Raw Dump channel cannot be created");
2208            pthread_mutex_unlock(&mMutex);
2209            return -ENOMEM;
2210        }
2211    }
2212
2213
2214    if (mAnalysisChannel) {
2215        cam_analysis_info_t analysisInfo;
2216        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2217        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2218                CAM_STREAM_TYPE_ANALYSIS;
2219        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2220                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2221        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2222                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2223                &analysisInfo);
2224        if (rc != NO_ERROR) {
2225            LOGE("getAnalysisInfo failed, ret = %d", rc);
2226            pthread_mutex_unlock(&mMutex);
2227            return rc;
2228        }
2229        cam_color_filter_arrangement_t analysis_color_arrangement =
2230                (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2231                CAM_FILTER_ARRANGEMENT_Y :
2232                gCamCapability[mCameraId]->color_arrangement);
2233        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2234                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2235                analysis_color_arrangement);
2236
2237        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2238                analysisInfo.analysis_max_res;
2239        mStreamConfigInfo.num_streams++;
2240    }
2241
2242    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2243        cam_analysis_info_t supportInfo;
2244        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2245        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2246        setPAAFSupport(callbackFeatureMask,
2247                CAM_STREAM_TYPE_CALLBACK,
2248                gCamCapability[mCameraId]->color_arrangement);
2249        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2250        if (rc != NO_ERROR) {
2251            LOGE("getAnalysisInfo failed, ret = %d", rc);
2252            pthread_mutex_unlock(&mMutex);
2253            return rc;
2254        }
2255        mSupportChannel = new QCamera3SupportChannel(
2256                mCameraHandle->camera_handle,
2257                mChannelHandle,
2258                mCameraHandle->ops,
2259                &gCamCapability[mCameraId]->padding_info,
2260                callbackFeatureMask,
2261                CAM_STREAM_TYPE_CALLBACK,
2262                &QCamera3SupportChannel::kDim,
2263                CAM_FORMAT_YUV_420_NV21,
2264                supportInfo.hw_analysis_supported,
2265                this, 0);
2266        if (!mSupportChannel) {
2267            LOGE("dummy channel cannot be created");
2268            pthread_mutex_unlock(&mMutex);
2269            return -ENOMEM;
2270        }
2271    }
2272
2273    if (mSupportChannel) {
2274        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2275                QCamera3SupportChannel::kDim;
2276        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2277                CAM_STREAM_TYPE_CALLBACK;
2278        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2279                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2280        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2281                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2282                gCamCapability[mCameraId]->color_arrangement);
2283        mStreamConfigInfo.num_streams++;
2284    }
2285
2286    if (mRawDumpChannel) {
2287        cam_dimension_t rawSize;
2288        rawSize = getMaxRawSize(mCameraId);
2289        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2290                rawSize;
2291        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2292                CAM_STREAM_TYPE_RAW;
2293        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2294                CAM_QCOM_FEATURE_NONE;
2295        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2296                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2297                gCamCapability[mCameraId]->color_arrangement);
2298        mStreamConfigInfo.num_streams++;
2299    }
2300    /* In HFR mode, if video stream is not added, create a dummy channel so that
2301     * ISP can create a batch mode even for preview only case. This channel is
2302     * never 'start'ed (no stream-on), it is only 'initialized'  */
2303    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2304            !m_bIsVideo) {
2305        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2306        setPAAFSupport(dummyFeatureMask,
2307                CAM_STREAM_TYPE_VIDEO,
2308                gCamCapability[mCameraId]->color_arrangement);
2309        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2310                mChannelHandle,
2311                mCameraHandle->ops, captureResultCb,
2312                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2313                this,
2314                &mDummyBatchStream,
2315                CAM_STREAM_TYPE_VIDEO,
2316                dummyFeatureMask,
2317                mMetadataChannel);
2318        if (NULL == mDummyBatchChannel) {
2319            LOGE("creation of mDummyBatchChannel failed."
2320                    "Preview will use non-hfr sensor mode ");
2321        }
2322    }
2323    if (mDummyBatchChannel) {
2324        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2325                mDummyBatchStream.width;
2326        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2327                mDummyBatchStream.height;
2328        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2329                CAM_STREAM_TYPE_VIDEO;
2330        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2332        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2333                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2334                gCamCapability[mCameraId]->color_arrangement);
2335        mStreamConfigInfo.num_streams++;
2336    }
2337
2338    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2339    mStreamConfigInfo.buffer_info.max_buffers =
2340            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2341
2342    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2343    for (pendingRequestIterator i = mPendingRequestsList.begin();
2344            i != mPendingRequestsList.end();) {
2345        i = erasePendingRequest(i);
2346    }
2347    mPendingFrameDropList.clear();
2348    // Initialize/Reset the pending buffers list
2349    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2350        req.mPendingBufferList.clear();
2351    }
2352    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2353
2354    mPendingReprocessResultList.clear();
2355
2356    mCurJpegMeta.clear();
2357    //Get min frame duration for this streams configuration
2358    deriveMinFrameDuration();
2359
2360    // Update state
2361    mState = CONFIGURED;
2362
2363    pthread_mutex_unlock(&mMutex);
2364
2365    return rc;
2366}
2367
2368/*===========================================================================
2369 * FUNCTION   : validateCaptureRequest
2370 *
2371 * DESCRIPTION: validate a capture request from camera service
2372 *
2373 * PARAMETERS :
2374 *   @request : request from framework to process
2375 *
2376 * RETURN     :
2377 *
2378 *==========================================================================*/
2379int QCamera3HardwareInterface::validateCaptureRequest(
2380                    camera3_capture_request_t *request)
2381{
2382    ssize_t idx = 0;
2383    const camera3_stream_buffer_t *b;
2384    CameraMetadata meta;
2385
2386    /* Sanity check the request */
2387    if (request == NULL) {
2388        LOGE("NULL capture request");
2389        return BAD_VALUE;
2390    }
2391
2392    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2393        /*settings cannot be null for the first request*/
2394        return BAD_VALUE;
2395    }
2396
2397    uint32_t frameNumber = request->frame_number;
2398    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2399        LOGE("Request %d: No output buffers provided!",
2400                __FUNCTION__, frameNumber);
2401        return BAD_VALUE;
2402    }
2403    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2404        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2405                 request->num_output_buffers, MAX_NUM_STREAMS);
2406        return BAD_VALUE;
2407    }
2408    if (request->input_buffer != NULL) {
2409        b = request->input_buffer;
2410        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2411            LOGE("Request %d: Buffer %ld: Status not OK!",
2412                     frameNumber, (long)idx);
2413            return BAD_VALUE;
2414        }
2415        if (b->release_fence != -1) {
2416            LOGE("Request %d: Buffer %ld: Has a release fence!",
2417                     frameNumber, (long)idx);
2418            return BAD_VALUE;
2419        }
2420        if (b->buffer == NULL) {
2421            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2422                     frameNumber, (long)idx);
2423            return BAD_VALUE;
2424        }
2425    }
2426
2427    // Validate all buffers
2428    b = request->output_buffers;
2429    do {
2430        QCamera3ProcessingChannel *channel =
2431                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2432        if (channel == NULL) {
2433            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2434                     frameNumber, (long)idx);
2435            return BAD_VALUE;
2436        }
2437        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2438            LOGE("Request %d: Buffer %ld: Status not OK!",
2439                     frameNumber, (long)idx);
2440            return BAD_VALUE;
2441        }
2442        if (b->release_fence != -1) {
2443            LOGE("Request %d: Buffer %ld: Has a release fence!",
2444                     frameNumber, (long)idx);
2445            return BAD_VALUE;
2446        }
2447        if (b->buffer == NULL) {
2448            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2449                     frameNumber, (long)idx);
2450            return BAD_VALUE;
2451        }
2452        if (*(b->buffer) == NULL) {
2453            LOGE("Request %d: Buffer %ld: NULL private handle!",
2454                     frameNumber, (long)idx);
2455            return BAD_VALUE;
2456        }
2457        idx++;
2458        b = request->output_buffers + idx;
2459    } while (idx < (ssize_t)request->num_output_buffers);
2460
2461    return NO_ERROR;
2462}
2463
2464/*===========================================================================
2465 * FUNCTION   : deriveMinFrameDuration
2466 *
2467 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2468 *              on currently configured streams.
2469 *
2470 * PARAMETERS : NONE
2471 *
2472 * RETURN     : NONE
2473 *
2474 *==========================================================================*/
2475void QCamera3HardwareInterface::deriveMinFrameDuration()
2476{
2477    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2478
2479    maxJpegDim = 0;
2480    maxProcessedDim = 0;
2481    maxRawDim = 0;
2482
2483    // Figure out maximum jpeg, processed, and raw dimensions
2484    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2485        it != mStreamInfo.end(); it++) {
2486
2487        // Input stream doesn't have valid stream_type
2488        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2489            continue;
2490
2491        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2492        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2493            if (dimension > maxJpegDim)
2494                maxJpegDim = dimension;
2495        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2496                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2497                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2498            if (dimension > maxRawDim)
2499                maxRawDim = dimension;
2500        } else {
2501            if (dimension > maxProcessedDim)
2502                maxProcessedDim = dimension;
2503        }
2504    }
2505
2506    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2507            MAX_SIZES_CNT);
2508
2509    //Assume all jpeg dimensions are in processed dimensions.
2510    if (maxJpegDim > maxProcessedDim)
2511        maxProcessedDim = maxJpegDim;
2512    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2513    if (maxProcessedDim > maxRawDim) {
2514        maxRawDim = INT32_MAX;
2515
2516        for (size_t i = 0; i < count; i++) {
2517            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2518                    gCamCapability[mCameraId]->raw_dim[i].height;
2519            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2520                maxRawDim = dimension;
2521        }
2522    }
2523
2524    //Find minimum durations for processed, jpeg, and raw
2525    for (size_t i = 0; i < count; i++) {
2526        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2527                gCamCapability[mCameraId]->raw_dim[i].height) {
2528            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2529            break;
2530        }
2531    }
2532    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2533    for (size_t i = 0; i < count; i++) {
2534        if (maxProcessedDim ==
2535                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2536                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2537            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2538            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2539            break;
2540        }
2541    }
2542}
2543
2544/*===========================================================================
2545 * FUNCTION   : getMinFrameDuration
2546 *
2547 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2548 *              and current request configuration.
2549 *
2550 * PARAMETERS : @request: requset sent by the frameworks
2551 *
2552 * RETURN     : min farme duration for a particular request
2553 *
2554 *==========================================================================*/
2555int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2556{
2557    bool hasJpegStream = false;
2558    bool hasRawStream = false;
2559    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2560        const camera3_stream_t *stream = request->output_buffers[i].stream;
2561        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2562            hasJpegStream = true;
2563        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2564                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2565                stream->format == HAL_PIXEL_FORMAT_RAW16)
2566            hasRawStream = true;
2567    }
2568
2569    if (!hasJpegStream)
2570        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2571    else
2572        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2573}
2574
2575/*===========================================================================
2576 * FUNCTION   : handleBuffersDuringFlushLock
2577 *
2578 * DESCRIPTION: Account for buffers returned from back-end during flush
2579 *              This function is executed while mMutex is held by the caller.
2580 *
2581 * PARAMETERS :
2582 *   @buffer: image buffer for the callback
2583 *
2584 * RETURN     :
2585 *==========================================================================*/
2586void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2587{
2588    bool buffer_found = false;
2589    for (List<PendingBuffersInRequest>::iterator req =
2590            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2591            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2592        for (List<PendingBufferInfo>::iterator i =
2593                req->mPendingBufferList.begin();
2594                i != req->mPendingBufferList.end(); i++) {
2595            if (i->buffer == buffer->buffer) {
2596                mPendingBuffersMap.numPendingBufsAtFlush--;
2597                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2598                    buffer->buffer, req->frame_number,
2599                    mPendingBuffersMap.numPendingBufsAtFlush);
2600                buffer_found = true;
2601                break;
2602            }
2603        }
2604        if (buffer_found) {
2605            break;
2606        }
2607    }
2608    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2609        //signal the flush()
2610        LOGD("All buffers returned to HAL. Continue flush");
2611        pthread_cond_signal(&mBuffersCond);
2612    }
2613}
2614
2615
2616/*===========================================================================
2617 * FUNCTION   : handlePendingReprocResults
2618 *
2619 * DESCRIPTION: check and notify on any pending reprocess results
2620 *
2621 * PARAMETERS :
2622 *   @frame_number   : Pending request frame number
2623 *
2624 * RETURN     : int32_t type of status
2625 *              NO_ERROR  -- success
2626 *              none-zero failure code
2627 *==========================================================================*/
2628int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
2629{
2630    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
2631            j != mPendingReprocessResultList.end(); j++) {
2632        if (j->frame_number == frame_number) {
2633            mCallbackOps->notify(mCallbackOps, &j->notify_msg);
2634
2635            LOGD("Delayed reprocess notify %d",
2636                    frame_number);
2637
2638            for (pendingRequestIterator k = mPendingRequestsList.begin();
2639                    k != mPendingRequestsList.end(); k++) {
2640
2641                if (k->frame_number == j->frame_number) {
2642                    LOGD("Found reprocess frame number %d in pending reprocess List "
2643                            "Take it out!!",
2644                            k->frame_number);
2645
2646                    camera3_capture_result result;
2647                    memset(&result, 0, sizeof(camera3_capture_result));
2648                    result.frame_number = frame_number;
2649                    result.num_output_buffers = 1;
2650                    result.output_buffers =  &j->buffer;
2651                    result.input_buffer = k->input_buffer;
2652                    result.result = k->settings;
2653                    result.partial_result = PARTIAL_RESULT_COUNT;
2654                    mCallbackOps->process_capture_result(mCallbackOps, &result);
2655
2656                    erasePendingRequest(k);
2657                    break;
2658                }
2659            }
2660            mPendingReprocessResultList.erase(j);
2661            break;
2662        }
2663    }
2664    return NO_ERROR;
2665}
2666
2667/*===========================================================================
2668 * FUNCTION   : handleBatchMetadata
2669 *
2670 * DESCRIPTION: Handles metadata buffer callback in batch mode
2671 *
2672 * PARAMETERS : @metadata_buf: metadata buffer
2673 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2674 *                 the meta buf in this method
2675 *
2676 * RETURN     :
2677 *
2678 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metdata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pointers into the metadata buffer; any of them may be NULL if the
    // corresponding entry is absent.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // A missing entry marks the whole buffer invalid, but it is still
    // pushed through handleMetadataWithLock below (see comment there).
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first one
        // recorded when the batch was submitted.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; the batch entry is
        // removed here (only once per batch) rather than in the urgent path.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGH("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One loop iteration per frame in the batch; a diff larger than the
        // max batch size indicates a back-end inconsistency, logged only.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Rewrite the in-buffer urgent frame number for this
                    // interpolated frame before dispatching.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: spread the batch evenly over the HFR frame
                //interval, ending at the timestamp reported for the last frame.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGH("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // Always dispatch with free_and_bufdone=false: the single metadata
        // buffer is shared by all iterations and released once, below.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2843
2844void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2845        camera3_error_msg_code_t errorCode)
2846{
2847    camera3_notify_msg_t notify_msg;
2848    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2849    notify_msg.type = CAMERA3_MSG_ERROR;
2850    notify_msg.message.error.error_code = errorCode;
2851    notify_msg.message.error.error_stream = NULL;
2852    notify_msg.message.error.frame_number = frameNumber;
2853    mCallbackOps->notify(mCallbackOps, &notify_msg);
2854
2855    return;
2856}
2857/*===========================================================================
2858 * FUNCTION   : handleMetadataWithLock
2859 *
2860 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2861 *
2862 * PARAMETERS : @metadata_buf: metadata buffer
2863 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2864 *                 the meta buf in this method
2865 *              @firstMetadataInBatch: Boolean to indicate whether this is the
2866 *                  first metadata in a batch. Valid only for batch mode
2867 *
2868 * RETURN     :
2869 *
2870 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool firstMetadataInBatch)
{
    ATRACE_CALL();
    // Dispatches one metadata buffer from the backend: sends the urgent
    // (3A) partial result, the shutter notify, and the final metadata
    // result for every pending request whose frame number is <= the one
    // carried by this buffer. Caller holds mMutex.
    //
    // Ownership: when free_and_bufdone_meta_buf is true, this function is
    // responsible for returning metadata_buf to mMetadataChannel and
    // freeing the wrapper struct — unless the buffer is handed to a
    // channel via queueReprocMetadata() below (internalPproc case).
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    // Extract the bookkeeping fields from the metadata batch. Any of
    // these pointers may be NULL if the corresponding entry is absent.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // NOTE(review): this log dereferences p_frame_number_valid and
    // p_frame_number BEFORE the NULL check below — confirm those entries
    // are always present whenever CAM_INTF_META_FRAME_DROPPED is, or this
    // can crash on malformed metadata.
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                 *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    frame_number_valid =        *p_frame_number_valid;
    frame_number =              *p_frame_number;
    capture_time =              *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number =       *p_urgent_frame_number;
    currentSysTime =            systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue
    // (older than MISSING_REQUEST_BUF_TIMEOUT seconds); if so, ask the
    // owning channel to time out that frame so the framework is unblocked.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if ( (currentSysTime - req.timestamp) >
            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
            for (auto &missed : req.mPendingBufferList) {
                assert(missed.stream->priv);
                if (missed.stream->priv) {
                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
                    assert(ch->mStreams[0]);
                    if (ch->mStreams[0]) {
                        LOGW("Missing: frame = %d, buffer = %p,"
                            "stream type = %d, stream format = %d",
                            req.frame_number, missed.buffer,
                            ch->mStreams[0]->getMyType(), missed.stream->format);
                        ch->timeoutFrame(req.frame_number);
                    }
                }
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
           urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // A non-reprocess request older than the urgent frame that
            // never got a partial result means we missed its urgent
            // metadata entirely; log it (no recovery is attempted here).
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                         i->frame_number);
            }

            // Deliver the 3A-only partial result exactly once per request.
            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A buffer without a valid normal frame number only carried the urgent
    // (SOF) information handled above; nothing more to deliver.
    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                    if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        // so the buffer itself can be flagged STATUS_ERROR
                        // when it is later returned.
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else if (mBatchSize) {
                // Batch mode: a skipped metadata is expected; report the
                // request as an ERROR_RESULT with a dummy metadata blob.

                mPendingLiveRequest--;

                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
            } else {
                // Non-batch: a missing metadata callback is fatal; move
                // the HAL to ERROR state and stop processing.
                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
                mState = ERROR;
                goto done_metadata;
            }
        } else {
            // i->frame_number == frame_number: this is the request the
            // metadata belongs to. Shutter notify must precede the result.
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            /* Set the timestamp in display metadata so that clients aware of
               private_handle such as VT can use this un-modified timestamps.
               Camera framework is unaware of this timestamp and cannot change this */
            updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Ownership of metadata_buf passes to the channel here;
                    // see the !internalPproc branch below which skips bufDone.
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, i->hybrid_ae_enable,
                     /* DevCamDebug metadata translateFromHalMetadata function call*/
                    i->DevCamDebug_meta_enable,
                    /* DevCamDebug metadata end */
                    internalPproc, i->fwkCacMode,
                    firstMetadataInBatch);
            // atrace_end(ATRACE_TAG_ALWAYS);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers already cached for this request (filled in by
        // handleBufferWithLock) so they can ride along with this result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (result_buffers != NULL) {
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Flag the buffer STATUS_ERROR if it was recorded
                        // as dropped above / on a previous metadata.
                        // NOTE(review): the match uses 'frame_number' (from
                        // this metadata), not i->frame_number — for entries
                        // being flushed out with i->frame_number <
                        // frame_number these differ; confirm intended.
                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                                m != mPendingFrameDropList.end(); m++) {
                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                        frame_number, streamID);
                                m = mPendingFrameDropList.erase(m);
                                break;
                            }
                        }
                        j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
                        // j->buffer was malloc'd by handleBufferWithLock;
                        // copy it out and free the cached wrapper.
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                    }
                }

                result.output_buffers = result_buffers;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("meta frame_number = %u, capture_time = %lld",
                        result.frame_number, i->timestamp);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            }else {
                LOGE("Fatal error: out of memory");
            }
        } else {
            // Metadata-only result (no buffers cached yet for this frame).
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                    result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        i = erasePendingRequest(i);

        // Releasing this request may unblock cached reprocess results that
        // were waiting for earlier frames to complete.
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every still-pending request has survived one more metadata interval;
    // its reported pipeline depth grows accordingly.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
3219
3220/*===========================================================================
3221 * FUNCTION   : hdrPlusPerfLock
3222 *
3223 * DESCRIPTION: perf lock for HDR+ using custom intent
3224 *
3225 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3226 *
3227 * RETURN     : None
3228 *
3229 *==========================================================================*/
3230void QCamera3HardwareInterface::hdrPlusPerfLock(
3231        mm_camera_super_buf_t *metadata_buf)
3232{
3233    if (NULL == metadata_buf) {
3234        LOGE("metadata_buf is NULL");
3235        return;
3236    }
3237    metadata_buffer_t *metadata =
3238            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3239    int32_t *p_frame_number_valid =
3240            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3241    uint32_t *p_frame_number =
3242            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3243
3244    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3245        LOGE("%s: Invalid metadata", __func__);
3246        return;
3247    }
3248
3249    //acquire perf lock for 5 sec after the last HDR frame is captured
3250    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3251        if ((p_frame_number != NULL) &&
3252                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3253            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3254        }
3255    }
3256
3257    //release lock after perf lock timer is expired. If lock is already released,
3258    //isTimerReset returns false
3259    if (m_perfLock.isTimerReset()) {
3260        mLastCustIntentFrmNum = -1;
3261        m_perfLock.lock_rel_timed();
3262    }
3263}
3264
3265/*===========================================================================
3266 * FUNCTION   : handleInputBufferWithLock
3267 *
3268 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3269 *
3270 * PARAMETERS : @frame_number: frame number of the input buffer
3271 *
3272 * RETURN     :
3273 *
3274 *==========================================================================*/
3275void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3276{
3277    ATRACE_CALL();
3278    pendingRequestIterator i = mPendingRequestsList.begin();
3279    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3280        i++;
3281    }
3282    if (i != mPendingRequestsList.end() && i->input_buffer) {
3283        //found the right request
3284        if (!i->shutter_notified) {
3285            CameraMetadata settings;
3286            camera3_notify_msg_t notify_msg;
3287            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3288            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3289            if(i->settings) {
3290                settings = i->settings;
3291                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3292                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3293                } else {
3294                    LOGE("No timestamp in input settings! Using current one.");
3295                }
3296            } else {
3297                LOGE("Input settings missing!");
3298            }
3299
3300            notify_msg.type = CAMERA3_MSG_SHUTTER;
3301            notify_msg.message.shutter.frame_number = frame_number;
3302            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3303            mCallbackOps->notify(mCallbackOps, &notify_msg);
3304            i->shutter_notified = true;
3305            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3306                        i->frame_number, notify_msg.message.shutter.timestamp);
3307        }
3308
3309        if (i->input_buffer->release_fence != -1) {
3310           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3311           close(i->input_buffer->release_fence);
3312           if (rc != OK) {
3313               LOGE("input buffer sync wait failed %d", rc);
3314           }
3315        }
3316
3317        camera3_capture_result result;
3318        memset(&result, 0, sizeof(camera3_capture_result));
3319        result.frame_number = frame_number;
3320        result.result = i->settings;
3321        result.input_buffer = i->input_buffer;
3322        result.partial_result = PARTIAL_RESULT_COUNT;
3323
3324        mCallbackOps->process_capture_result(mCallbackOps, &result);
3325        LOGD("Input request metadata and input buffer frame_number = %u",
3326                        i->frame_number);
3327        i = erasePendingRequest(i);
3328    } else {
3329        LOGE("Could not find input request for frame number %d", frame_number);
3330    }
3331}
3332
3333/*===========================================================================
3334 * FUNCTION   : handleBufferWithLock
3335 *
3336 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3337 *
3338 * PARAMETERS : @buffer: image buffer for the callback
3339 *              @frame_number: frame number of the image buffer
3340 *
3341 * RETURN     :
3342 *
3343 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // Routes a completed stream buffer: either returns it straight to the
    // framework (if its metadata already went out), caches it on the
    // pending request (waiting for metadata), or handles the reprocess
    // path. Caller holds mMutex.
    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        // Metadata already delivered: send this buffer as a buffers-only
        // result (result == NULL, partial_result == 0).
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was recorded as dropped, flag the
        // buffer STATUS_ERROR and consume the drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notify, using the
            // sensor timestamp from the input settings when available.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Honor the input buffer's release fence before touching it.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            // The reprocess result may only be sent once all earlier
            // frames have been reported; otherwise it must be cached.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("Notify reprocess now %d!", frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (delivered by handlePendingReprocResults once earlier
                // frames complete).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                LOGD("Cache reprocess result %d!", frame_number);
            }
        } else {
            // Metadata not delivered yet: cache a malloc'd copy of the
            // buffer on the matching stream slot of the pending request.
            // handleMetadataWithLock later sends it and frees the copy.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }
}
3481
3482/*===========================================================================
3483 * FUNCTION   : unblockRequestIfNecessary
3484 *
3485 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3486 *              that mMutex is held when this function is called.
3487 *
3488 * PARAMETERS :
3489 *
3490 * RETURN     :
3491 *
3492 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Wake one thread waiting on mRequestCond; per the function header,
   // mMutex is already held by the caller, so this signal is race-free
   // with respect to the condition it guards.
   pthread_cond_signal(&mRequestCond);
}
3498
3499
3500/*===========================================================================
3501 * FUNCTION   : processCaptureRequest
3502 *
3503 * DESCRIPTION: process a capture request from camera service
3504 *
3505 * PARAMETERS :
3506 *   @request : request from framework to process
3507 *
3508 * RETURN     :
3509 *
3510 *==========================================================================*/
3511int QCamera3HardwareInterface::processCaptureRequest(
3512                    camera3_capture_request_t *request)
3513{
3514    ATRACE_CALL();
3515    int rc = NO_ERROR;
3516    int32_t request_id;
3517    CameraMetadata meta;
3518    bool isVidBufRequested = false;
3519    camera3_stream_buffer_t *pInputBuffer = NULL;
3520
3521    pthread_mutex_lock(&mMutex);
3522
3523    // Validate current state
3524    switch (mState) {
3525        case CONFIGURED:
3526        case STARTED:
3527            /* valid state */
3528            break;
3529
3530        case ERROR:
3531            pthread_mutex_unlock(&mMutex);
3532            handleCameraDeviceError();
3533            return -ENODEV;
3534
3535        default:
3536            LOGE("Invalid state %d", mState);
3537            pthread_mutex_unlock(&mMutex);
3538            return -ENODEV;
3539    }
3540
3541    rc = validateCaptureRequest(request);
3542    if (rc != NO_ERROR) {
3543        LOGE("incoming request is not valid");
3544        pthread_mutex_unlock(&mMutex);
3545        return rc;
3546    }
3547
3548    meta = request->settings;
3549
3550    // For first capture request, send capture intent, and
3551    // stream on all streams
3552    if (mState == CONFIGURED) {
3553        // send an unconfigure to the backend so that the isp
3554        // resources are deallocated
3555        if (!mFirstConfiguration) {
3556            cam_stream_size_info_t stream_config_info;
3557            int32_t hal_version = CAM_HAL_V3;
3558            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3559            stream_config_info.buffer_info.min_buffers =
3560                    MIN_INFLIGHT_REQUESTS;
3561            stream_config_info.buffer_info.max_buffers =
3562                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3563            clear_metadata_buffer(mParameters);
3564            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3565                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3566            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3567                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3568            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3569                    mParameters);
3570            if (rc < 0) {
3571                LOGE("set_parms for unconfigure failed");
3572                pthread_mutex_unlock(&mMutex);
3573                return rc;
3574            }
3575        }
3576        m_perfLock.lock_acq();
3577        /* get eis information for stream configuration */
3578        cam_is_type_t is_type;
3579        char is_type_value[PROPERTY_VALUE_MAX];
3580        property_get("persist.camera.is_type", is_type_value, "0");
3581        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3582
3583        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3584            int32_t hal_version = CAM_HAL_V3;
3585            uint8_t captureIntent =
3586                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3587            mCaptureIntent = captureIntent;
3588            clear_metadata_buffer(mParameters);
3589            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3590            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3591        }
3592
3593        //If EIS is enabled, turn it on for video
3594        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3595        int32_t vsMode;
3596        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3597        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3598            rc = BAD_VALUE;
3599        }
3600
3601        //IS type will be 0 unless EIS is supported. If EIS is supported
3602        //it could either be 1 or 4 depending on the stream and video size
3603        if (setEis) {
3604            if (!m_bEisSupportedSize) {
3605                is_type = IS_TYPE_DIS;
3606            } else {
3607                is_type = IS_TYPE_EIS_2_0;
3608            }
3609            mStreamConfigInfo.is_type = is_type;
3610        } else {
3611            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3612        }
3613
3614        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3615                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3616        int32_t tintless_value = 1;
3617        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3618                CAM_INTF_PARM_TINTLESS, tintless_value);
3619        //Disable CDS for HFR mode or if DIS/EIS is on.
3620        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3621        //after every configure_stream
3622        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3623                (m_bIsVideo)) {
3624            int32_t cds = CAM_CDS_MODE_OFF;
3625            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3626                    CAM_INTF_PARM_CDS_MODE, cds))
3627                LOGE("Failed to disable CDS for HFR mode");
3628
3629        }
3630
3631        if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3632            uint8_t* use_av_timer = NULL;
3633
3634            if (m_debug_avtimer){
3635                use_av_timer = &m_debug_avtimer;
3636            }
3637            else{
3638                use_av_timer =
3639                    meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3640            }
3641
3642            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3643                rc = BAD_VALUE;
3644            }
3645        }
3646
3647        setMobicat();
3648
3649        /* Set fps and hfr mode while sending meta stream info so that sensor
3650         * can configure appropriate streaming mode */
3651        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3652        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3653        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3654        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3655            rc = setHalFpsRange(meta, mParameters);
3656            if (rc == NO_ERROR) {
3657                int32_t max_fps =
3658                    (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3659                if (mBatchSize) {
3660                    /* For HFR, more buffers are dequeued upfront to improve the performance */
3661                    mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3662                    mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3663                } else if (max_fps == 60) {
3664                    /* for 60 fps usecas increase inflight requests */
3665                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3666                    mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
3667                } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3668                    /* for non 60 fps video use cases, set min = max inflight requests to
3669                    avoid frame drops due to degraded system performance */
3670                    mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
3671                }
3672            }
3673            else {
3674                LOGE("setHalFpsRange failed");
3675            }
3676        }
3677        if (meta.exists(ANDROID_CONTROL_MODE)) {
3678            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3679            rc = extractSceneMode(meta, metaMode, mParameters);
3680            if (rc != NO_ERROR) {
3681                LOGE("extractSceneMode failed");
3682            }
3683        }
3684        memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3685
3686
3687        //TODO: validate the arguments, HSV scenemode should have only the
3688        //advertised fps ranges
3689
3690        /*set the capture intent, hal version, tintless, stream info,
3691         *and disenable parameters to the backend*/
3692        LOGD("set_parms META_STREAM_INFO " );
3693        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3694            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3695                    "Format:%d",
3696                    mStreamConfigInfo.type[i],
3697                    mStreamConfigInfo.stream_sizes[i].width,
3698                    mStreamConfigInfo.stream_sizes[i].height,
3699                    mStreamConfigInfo.postprocess_mask[i],
3700                    mStreamConfigInfo.format[i]);
3701        }
3702
3703        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3704                    mParameters);
3705        if (rc < 0) {
3706            LOGE("set_parms failed for hal version, stream info");
3707        }
3708
3709        cam_dimension_t sensor_dim;
3710        memset(&sensor_dim, 0, sizeof(sensor_dim));
3711        rc = getSensorOutputSize(sensor_dim);
3712        if (rc != NO_ERROR) {
3713            LOGE("Failed to get sensor output size");
3714            pthread_mutex_unlock(&mMutex);
3715            goto error_exit;
3716        }
3717
3718        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3719                gCamCapability[mCameraId]->active_array_size.height,
3720                sensor_dim.width, sensor_dim.height);
3721
3722        /* Set batchmode before initializing channel. Since registerBuffer
3723         * internally initializes some of the channels, better set batchmode
3724         * even before first register buffer */
3725        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3726            it != mStreamInfo.end(); it++) {
3727            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3728            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3729                    && mBatchSize) {
3730                rc = channel->setBatchSize(mBatchSize);
3731                //Disable per frame map unmap for HFR/batchmode case
3732                rc |= channel->setPerFrameMapUnmap(false);
3733                if (NO_ERROR != rc) {
3734                    LOGE("Channel init failed %d", rc);
3735                    pthread_mutex_unlock(&mMutex);
3736                    goto error_exit;
3737                }
3738            }
3739        }
3740
3741        //First initialize all streams
3742        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3743            it != mStreamInfo.end(); it++) {
3744            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3745            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3746               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3747               setEis)
3748                rc = channel->initialize(is_type);
3749            else {
3750                rc = channel->initialize(IS_TYPE_NONE);
3751            }
3752            if (NO_ERROR != rc) {
3753                LOGE("Channel initialization failed %d", rc);
3754                pthread_mutex_unlock(&mMutex);
3755                goto error_exit;
3756            }
3757        }
3758
3759        if (mRawDumpChannel) {
3760            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3761            if (rc != NO_ERROR) {
3762                LOGE("Error: Raw Dump Channel init failed");
3763                pthread_mutex_unlock(&mMutex);
3764                goto error_exit;
3765            }
3766        }
3767        if (mSupportChannel) {
3768            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3769            if (rc < 0) {
3770                LOGE("Support channel initialization failed");
3771                pthread_mutex_unlock(&mMutex);
3772                goto error_exit;
3773            }
3774        }
3775        if (mAnalysisChannel) {
3776            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3777            if (rc < 0) {
3778                LOGE("Analysis channel initialization failed");
3779                pthread_mutex_unlock(&mMutex);
3780                goto error_exit;
3781            }
3782        }
3783        if (mDummyBatchChannel) {
3784            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3785            if (rc < 0) {
3786                LOGE("mDummyBatchChannel setBatchSize failed");
3787                pthread_mutex_unlock(&mMutex);
3788                goto error_exit;
3789            }
3790            rc = mDummyBatchChannel->initialize(is_type);
3791            if (rc < 0) {
3792                LOGE("mDummyBatchChannel initialization failed");
3793                pthread_mutex_unlock(&mMutex);
3794                goto error_exit;
3795            }
3796        }
3797
3798        // Set bundle info
3799        rc = setBundleInfo();
3800        if (rc < 0) {
3801            LOGE("setBundleInfo failed %d", rc);
3802            pthread_mutex_unlock(&mMutex);
3803            goto error_exit;
3804        }
3805
3806        //update settings from app here
3807        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3808            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3809            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3810        }
3811        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3812            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3813            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3814        }
3815        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3816            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3817            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3818
3819            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3820                (mLinkedCameraId != mCameraId) ) {
3821                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3822                    mLinkedCameraId, mCameraId);
3823                goto error_exit;
3824            }
3825        }
3826
3827        // add bundle related cameras
3828        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3829        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3830            if (mIsDeviceLinked)
3831                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3832            else
3833                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3834
3835            pthread_mutex_lock(&gCamLock);
3836
3837            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3838                LOGE("Dualcam: Invalid Session Id ");
3839                pthread_mutex_unlock(&gCamLock);
3840                goto error_exit;
3841            }
3842
3843            if (mIsMainCamera == 1) {
3844                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3845                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3846                // related session id should be session id of linked session
3847                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3848            } else {
3849                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3850                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3851                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3852            }
3853            pthread_mutex_unlock(&gCamLock);
3854
3855            rc = mCameraHandle->ops->sync_related_sensors(
3856                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3857            if (rc < 0) {
3858                LOGE("Dualcam: link failed");
3859                goto error_exit;
3860            }
3861        }
3862
3863        //Then start them.
3864        LOGH("Start META Channel");
3865        rc = mMetadataChannel->start();
3866        if (rc < 0) {
3867            LOGE("META channel start failed");
3868            pthread_mutex_unlock(&mMutex);
3869            goto error_exit;
3870        }
3871
3872        if (mAnalysisChannel) {
3873            rc = mAnalysisChannel->start();
3874            if (rc < 0) {
3875                LOGE("Analysis channel start failed");
3876                mMetadataChannel->stop();
3877                pthread_mutex_unlock(&mMutex);
3878                goto error_exit;
3879            }
3880        }
3881
3882        if (mSupportChannel) {
3883            rc = mSupportChannel->start();
3884            if (rc < 0) {
3885                LOGE("Support channel start failed");
3886                mMetadataChannel->stop();
3887                /* Although support and analysis are mutually exclusive today
3888                   adding it in anycase for future proofing */
3889                if (mAnalysisChannel) {
3890                    mAnalysisChannel->stop();
3891                }
3892                pthread_mutex_unlock(&mMutex);
3893                goto error_exit;
3894            }
3895        }
3896        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3897            it != mStreamInfo.end(); it++) {
3898            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3899            LOGH("Start Processing Channel mask=%d",
3900                     channel->getStreamTypeMask());
3901            rc = channel->start();
3902            if (rc < 0) {
3903                LOGE("channel start failed");
3904                pthread_mutex_unlock(&mMutex);
3905                goto error_exit;
3906            }
3907        }
3908
3909        if (mRawDumpChannel) {
3910            LOGD("Starting raw dump stream");
3911            rc = mRawDumpChannel->start();
3912            if (rc != NO_ERROR) {
3913                LOGE("Error Starting Raw Dump Channel");
3914                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3915                      it != mStreamInfo.end(); it++) {
3916                    QCamera3Channel *channel =
3917                        (QCamera3Channel *)(*it)->stream->priv;
3918                    LOGH("Stopping Processing Channel mask=%d",
3919                        channel->getStreamTypeMask());
3920                    channel->stop();
3921                }
3922                if (mSupportChannel)
3923                    mSupportChannel->stop();
3924                if (mAnalysisChannel) {
3925                    mAnalysisChannel->stop();
3926                }
3927                mMetadataChannel->stop();
3928                pthread_mutex_unlock(&mMutex);
3929                goto error_exit;
3930            }
3931        }
3932
3933        if (mChannelHandle) {
3934
3935            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3936                    mChannelHandle);
3937            if (rc != NO_ERROR) {
3938                LOGE("start_channel failed %d", rc);
3939                pthread_mutex_unlock(&mMutex);
3940                goto error_exit;
3941            }
3942        }
3943
3944        goto no_error;
3945error_exit:
3946        m_perfLock.lock_rel();
3947        return rc;
3948no_error:
3949        m_perfLock.lock_rel();
3950
3951        mWokenUpByDaemon = false;
3952        mPendingLiveRequest = 0;
3953        mFirstConfiguration = false;
3954        enablePowerHint();
3955    }
3956
3957    uint32_t frameNumber = request->frame_number;
3958    cam_stream_ID_t streamsArray;
3959
3960    if (mFlushPerf) {
3961        //we cannot accept any requests during flush
3962        LOGE("process_capture_request cannot proceed during flush");
3963        pthread_mutex_unlock(&mMutex);
3964        return NO_ERROR; //should return an error
3965    }
3966
3967    if (meta.exists(ANDROID_REQUEST_ID)) {
3968        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3969        mCurrentRequestId = request_id;
3970        LOGD("Received request with id: %d", request_id);
3971    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3972        LOGE("Unable to find request id field, \
3973                & no previous id available");
3974        pthread_mutex_unlock(&mMutex);
3975        return NAME_NOT_FOUND;
3976    } else {
3977        LOGD("Re-using old request id");
3978        request_id = mCurrentRequestId;
3979    }
3980
3981    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3982                                    request->num_output_buffers,
3983                                    request->input_buffer,
3984                                    frameNumber);
3985    // Acquire all request buffers first
3986    streamsArray.num_streams = 0;
3987    int blob_request = 0;
3988    uint32_t snapshotStreamId = 0;
3989    for (size_t i = 0; i < request->num_output_buffers; i++) {
3990        const camera3_stream_buffer_t& output = request->output_buffers[i];
3991        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3992
3993        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3994            //Call function to store local copy of jpeg data for encode params.
3995            blob_request = 1;
3996            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3997        }
3998
3999        if (output.acquire_fence != -1) {
4000           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4001           close(output.acquire_fence);
4002           if (rc != OK) {
4003              LOGE("sync wait failed %d", rc);
4004              pthread_mutex_unlock(&mMutex);
4005              return rc;
4006           }
4007        }
4008
4009        streamsArray.stream_request[streamsArray.num_streams++].streamID =
4010            channel->getStreamID(channel->getStreamTypeMask());
4011
4012        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4013            isVidBufRequested = true;
4014        }
4015    }
4016
4017    if (blob_request) {
4018        KPI_ATRACE_INT("SNAPSHOT", 1);
4019    }
4020    if (blob_request && mRawDumpChannel) {
4021        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
4022        streamsArray.stream_request[streamsArray.num_streams].streamID =
4023            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
4024        streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4025    }
4026
4027    if(request->input_buffer == NULL) {
4028        /* Parse the settings:
4029         * - For every request in NORMAL MODE
4030         * - For every request in HFR mode during preview only case
4031         * - For first request of every batch in HFR mode during video
4032         * recording. In batchmode the same settings except frame number is
4033         * repeated in each request of the batch.
4034         */
4035        if (!mBatchSize ||
4036           (mBatchSize && !isVidBufRequested) ||
4037           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
4038            rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4039            if (rc < 0) {
4040                LOGE("fail to set frame parameters");
4041                pthread_mutex_unlock(&mMutex);
4042                return rc;
4043            }
4044        }
4045        /* For batchMode HFR, setFrameParameters is not called for every
4046         * request. But only frame number of the latest request is parsed.
4047         * Keep track of first and last frame numbers in a batch so that
4048         * metadata for the frame numbers of batch can be duplicated in
4049         * handleBatchMetadta */
4050        if (mBatchSize) {
4051            if (!mToBeQueuedVidBufs) {
4052                //start of the batch
4053                mFirstFrameNumberInBatch = request->frame_number;
4054            }
4055            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4056                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4057                LOGE("Failed to set the frame number in the parameters");
4058                return BAD_VALUE;
4059            }
4060        }
4061        if (mNeedSensorRestart) {
4062            /* Unlock the mutex as restartSensor waits on the channels to be
4063             * stopped, which in turn calls stream callback functions -
4064             * handleBufferWithLock and handleMetadataWithLock */
4065            pthread_mutex_unlock(&mMutex);
4066            rc = dynamicUpdateMetaStreamInfo();
4067            if (rc != NO_ERROR) {
4068                LOGE("Restarting the sensor failed");
4069                return BAD_VALUE;
4070            }
4071            mNeedSensorRestart = false;
4072            pthread_mutex_lock(&mMutex);
4073        }
4074    } else {
4075
4076        if (request->input_buffer->acquire_fence != -1) {
4077           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4078           close(request->input_buffer->acquire_fence);
4079           if (rc != OK) {
4080              LOGE("input buffer sync wait failed %d", rc);
4081              pthread_mutex_unlock(&mMutex);
4082              return rc;
4083           }
4084        }
4085    }
4086
4087    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4088        mLastCustIntentFrmNum = frameNumber;
4089    }
4090    /* Update pending request list and pending buffers map */
4091    PendingRequestInfo pendingRequest;
4092    pendingRequestIterator latestRequest;
4093    pendingRequest.frame_number = frameNumber;
4094    pendingRequest.num_buffers = request->num_output_buffers;
4095    pendingRequest.request_id = request_id;
4096    pendingRequest.blob_request = blob_request;
4097    pendingRequest.timestamp = 0;
4098    pendingRequest.bUrgentReceived = 0;
4099    if (request->input_buffer) {
4100        pendingRequest.input_buffer =
4101                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4102        *(pendingRequest.input_buffer) = *(request->input_buffer);
4103        pInputBuffer = pendingRequest.input_buffer;
4104    } else {
4105       pendingRequest.input_buffer = NULL;
4106       pInputBuffer = NULL;
4107    }
4108
4109    pendingRequest.pipeline_depth = 0;
4110    pendingRequest.partial_result_cnt = 0;
4111    extractJpegMetadata(mCurJpegMeta, request);
4112    pendingRequest.jpegMetadata = mCurJpegMeta;
4113    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4114    pendingRequest.shutter_notified = false;
4115
4116    //extract capture intent
4117    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4118        mCaptureIntent =
4119                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4120    }
4121    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4122        mHybridAeEnable =
4123                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4124    }
4125    pendingRequest.capture_intent = mCaptureIntent;
4126    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4127    /* DevCamDebug metadata processCaptureRequest */
4128    if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4129        mDevCamDebugMetaEnable =
4130                meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4131    }
4132    pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4133    /* DevCamDebug metadata end */
4134
4135    //extract CAC info
4136    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4137        mCacMode =
4138                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4139    }
4140    pendingRequest.fwkCacMode = mCacMode;
4141
4142    PendingBuffersInRequest bufsForCurRequest;
4143    bufsForCurRequest.frame_number = frameNumber;
4144    // Mark current timestamp for the new request
4145    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4146
4147    for (size_t i = 0; i < request->num_output_buffers; i++) {
4148        RequestedBufferInfo requestedBuf;
4149        memset(&requestedBuf, 0, sizeof(requestedBuf));
4150        requestedBuf.stream = request->output_buffers[i].stream;
4151        requestedBuf.buffer = NULL;
4152        pendingRequest.buffers.push_back(requestedBuf);
4153
4154        // Add to buffer handle the pending buffers list
4155        PendingBufferInfo bufferInfo;
4156        bufferInfo.buffer = request->output_buffers[i].buffer;
4157        bufferInfo.stream = request->output_buffers[i].stream;
4158        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4159        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4160        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4161            frameNumber, bufferInfo.buffer,
4162            channel->getStreamTypeMask(), bufferInfo.stream->format);
4163    }
4164    // Add this request packet into mPendingBuffersMap
4165    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4166    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4167        mPendingBuffersMap.get_num_overall_buffers());
4168
4169    latestRequest = mPendingRequestsList.insert(
4170            mPendingRequestsList.end(), pendingRequest);
4171    if(mFlush) {
4172        LOGI("mFlush is true");
4173        pthread_mutex_unlock(&mMutex);
4174        return NO_ERROR;
4175    }
4176
4177    int indexUsed;
4178    // Notify metadata channel we receive a request
4179    mMetadataChannel->request(NULL, frameNumber, indexUsed);
4180
4181    if(request->input_buffer != NULL){
4182        LOGD("Input request, frame_number %d", frameNumber);
4183        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4184        if (NO_ERROR != rc) {
4185            LOGE("fail to set reproc parameters");
4186            pthread_mutex_unlock(&mMutex);
4187            return rc;
4188        }
4189    }
4190
4191    // Call request on other streams
4192    uint32_t streams_need_metadata = 0;
4193    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4194    for (size_t i = 0; i < request->num_output_buffers; i++) {
4195        const camera3_stream_buffer_t& output = request->output_buffers[i];
4196        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4197
4198        if (channel == NULL) {
4199            LOGW("invalid channel pointer for stream");
4200            continue;
4201        }
4202
4203        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4204            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4205                      output.buffer, request->input_buffer, frameNumber);
4206            if(request->input_buffer != NULL){
4207                rc = channel->request(output.buffer, frameNumber,
4208                        pInputBuffer, &mReprocMeta, indexUsed);
4209                if (rc < 0) {
4210                    LOGE("Fail to request on picture channel");
4211                    pthread_mutex_unlock(&mMutex);
4212                    return rc;
4213                }
4214            } else {
4215                LOGD("snapshot request with buffer %p, frame_number %d",
4216                         output.buffer, frameNumber);
4217                if (!request->settings) {
4218                    rc = channel->request(output.buffer, frameNumber,
4219                            NULL, mPrevParameters, indexUsed);
4220                } else {
4221                    rc = channel->request(output.buffer, frameNumber,
4222                            NULL, mParameters, indexUsed);
4223                }
4224                if (rc < 0) {
4225                    LOGE("Fail to request on picture channel");
4226                    pthread_mutex_unlock(&mMutex);
4227                    return rc;
4228                }
4229
4230                uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4231                uint32_t j = 0;
4232                for (j = 0; j < streamsArray.num_streams; j++) {
4233                    if (streamsArray.stream_request[j].streamID == streamId) {
4234                      if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4235                          streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4236                      else
4237                          streamsArray.stream_request[j].buf_index = indexUsed;
4238                        break;
4239                    }
4240                }
4241                if (j == streamsArray.num_streams) {
4242                    LOGE("Did not find matching stream to update index");
4243                    assert(0);
4244                }
4245
4246                pendingBufferIter->need_metadata = true;
4247                streams_need_metadata++;
4248            }
4249        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4250            bool needMetadata = false;
4251            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4252            rc = yuvChannel->request(output.buffer, frameNumber,
4253                    pInputBuffer,
4254                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4255            if (rc < 0) {
4256                LOGE("Fail to request on YUV channel");
4257                pthread_mutex_unlock(&mMutex);
4258                return rc;
4259            }
4260
4261            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4262            uint32_t j = 0;
4263            for (j = 0; j < streamsArray.num_streams; j++) {
4264                if (streamsArray.stream_request[j].streamID == streamId) {
4265                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4266                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4267                    else
4268                        streamsArray.stream_request[j].buf_index = indexUsed;
4269                    break;
4270                }
4271            }
4272            if (j == streamsArray.num_streams) {
4273                LOGE("Did not find matching stream to update index");
4274                assert(0);
4275            }
4276
4277            pendingBufferIter->need_metadata = needMetadata;
4278            if (needMetadata)
4279                streams_need_metadata += 1;
4280            LOGD("calling YUV channel request, need_metadata is %d",
4281                     needMetadata);
4282        } else {
4283            LOGD("request with buffer %p, frame_number %d",
4284                  output.buffer, frameNumber);
4285
4286            rc = channel->request(output.buffer, frameNumber, indexUsed);
4287
4288            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4289            uint32_t j = 0;
4290            for (j = 0; j < streamsArray.num_streams; j++) {
4291                if (streamsArray.stream_request[j].streamID == streamId) {
4292                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4293                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4294                    else
4295                        streamsArray.stream_request[j].buf_index = indexUsed;
4296                    break;
4297                }
4298            }
4299            if (j == streamsArray.num_streams) {
4300                LOGE("Did not find matching stream to update index");
4301                assert(0);
4302            }
4303
4304            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4305                    && mBatchSize) {
4306                mToBeQueuedVidBufs++;
4307                if (mToBeQueuedVidBufs == mBatchSize) {
4308                    channel->queueBatchBuf();
4309                }
4310            }
4311            if (rc < 0) {
4312                LOGE("request failed");
4313                pthread_mutex_unlock(&mMutex);
4314                return rc;
4315            }
4316        }
4317        pendingBufferIter++;
4318    }
4319
4320    //If 2 streams have need_metadata set to true, fail the request, unless
4321    //we copy/reference count the metadata buffer
4322    if (streams_need_metadata > 1) {
4323        LOGE("not supporting request in which two streams requires"
4324                " 2 HAL metadata for reprocessing");
4325        pthread_mutex_unlock(&mMutex);
4326        return -EINVAL;
4327    }
4328
4329    if (request->input_buffer == NULL) {
4330        /* Set the parameters to backend:
4331         * - For every request in NORMAL MODE
4332         * - For every request in HFR mode during preview only case
4333         * - Once every batch in HFR mode during video recording
4334         */
4335        if (!mBatchSize ||
4336           (mBatchSize && !isVidBufRequested) ||
4337           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4338            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4339                     mBatchSize, isVidBufRequested,
4340                    mToBeQueuedVidBufs);
4341
4342            if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4343                for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4344                    uint32_t m = 0;
4345                    for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4346                        if (streamsArray.stream_request[k].streamID ==
4347                                mBatchedStreamsArray.stream_request[m].streamID)
4348                            break;
4349                        }
4350                        if (m == mBatchedStreamsArray.num_streams) {
4351                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4352                                streamsArray.stream_request[k].streamID;
4353                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4354                                streamsArray.stream_request[k].buf_index;
4355                            mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4356                        }
4357                }
4358                streamsArray = mBatchedStreamsArray;
4359            }
4360            /* Update stream id of all the requested buffers */
4361            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4362                LOGE("Failed to set stream type mask in the parameters");
4363                return BAD_VALUE;
4364            }
4365
4366            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4367                    mParameters);
4368            if (rc < 0) {
4369                LOGE("set_parms failed");
4370            }
4371            /* reset to zero coz, the batch is queued */
4372            mToBeQueuedVidBufs = 0;
4373            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4374            memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4375        } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4376            for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4377                uint32_t m = 0;
4378                for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4379                    if (streamsArray.stream_request[k].streamID ==
4380                            mBatchedStreamsArray.stream_request[m].streamID)
4381                        break;
4382                }
4383                if (m == mBatchedStreamsArray.num_streams) {
4384                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4385                        streamsArray.stream_request[k].streamID;
4386                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4387                        streamsArray.stream_request[k].buf_index;
4388                    mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4389                }
4390            }
4391        }
4392        mPendingLiveRequest++;
4393    }
4394
4395    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4396
4397    mState = STARTED;
4398    // Added a timed condition wait
4399    struct timespec ts;
4400    uint8_t isValidTimeout = 1;
4401    rc = clock_gettime(CLOCK_REALTIME, &ts);
4402    if (rc < 0) {
4403      isValidTimeout = 0;
4404      LOGE("Error reading the real time clock!!");
4405    }
4406    else {
4407      // Make timeout as 5 sec for request to be honored
4408      ts.tv_sec += 5;
4409    }
4410    //Block on conditional variable
4411    while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4412            (mState != ERROR) && (mState != DEINIT)) {
4413        if (!isValidTimeout) {
4414            LOGD("Blocking on conditional wait");
4415            pthread_cond_wait(&mRequestCond, &mMutex);
4416        }
4417        else {
4418            LOGD("Blocking on timed conditional wait");
4419            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4420            if (rc == ETIMEDOUT) {
4421                rc = -ENODEV;
4422                LOGE("Unblocked on timeout!!!!");
4423                break;
4424            }
4425        }
4426        LOGD("Unblocked");
4427        if (mWokenUpByDaemon) {
4428            mWokenUpByDaemon = false;
4429            if (mPendingLiveRequest < mMaxInFlightRequests)
4430                break;
4431        }
4432    }
4433    pthread_mutex_unlock(&mMutex);
4434
4435    return rc;
4436}
4437
4438/*===========================================================================
4439 * FUNCTION   : dump
4440 *
4441 * DESCRIPTION:
4442 *
4443 * PARAMETERS :
4444 *
4445 *
4446 * RETURN     :
4447 *==========================================================================*/
4448void QCamera3HardwareInterface::dump(int fd)
4449{
4450    pthread_mutex_lock(&mMutex);
4451    dprintf(fd, "\n Camera HAL3 information Begin \n");
4452
4453    dprintf(fd, "\nNumber of pending requests: %zu \n",
4454        mPendingRequestsList.size());
4455    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4456    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4457    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4458    for(pendingRequestIterator i = mPendingRequestsList.begin();
4459            i != mPendingRequestsList.end(); i++) {
4460        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4461        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4462        i->input_buffer);
4463    }
4464    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4465                mPendingBuffersMap.get_num_overall_buffers());
4466    dprintf(fd, "-------+------------------\n");
4467    dprintf(fd, " Frame | Stream type mask \n");
4468    dprintf(fd, "-------+------------------\n");
4469    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4470        for(auto &j : req.mPendingBufferList) {
4471            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4472            dprintf(fd, " %5d | %11d \n",
4473                    req.frame_number, channel->getStreamTypeMask());
4474        }
4475    }
4476    dprintf(fd, "-------+------------------\n");
4477
4478    dprintf(fd, "\nPending frame drop list: %zu\n",
4479        mPendingFrameDropList.size());
4480    dprintf(fd, "-------+-----------\n");
4481    dprintf(fd, " Frame | Stream ID \n");
4482    dprintf(fd, "-------+-----------\n");
4483    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4484        i != mPendingFrameDropList.end(); i++) {
4485        dprintf(fd, " %5d | %9d \n",
4486            i->frame_number, i->stream_ID);
4487    }
4488    dprintf(fd, "-------+-----------\n");
4489
4490    dprintf(fd, "\n Camera HAL3 information End \n");
4491
4492    /* use dumpsys media.camera as trigger to send update debug level event */
4493    mUpdateDebugLevel = true;
4494    pthread_mutex_unlock(&mMutex);
4495    return;
4496}
4497
4498/*===========================================================================
4499 * FUNCTION   : flush
4500 *
4501 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4502 *              conditionally restarts channels
4503 *
4504 * PARAMETERS :
4505 *  @ restartChannels: re-start all channels
4506 *
4507 *
4508 * RETURN     :
4509 *          0 on success
4510 *          Error code on failure
4511 *==========================================================================*/
4512int QCamera3HardwareInterface::flush(bool restartChannels)
4513{
4514    KPI_ATRACE_CALL();
4515    int32_t rc = NO_ERROR;
4516
4517    LOGD("Unblocking Process Capture Request");
4518    pthread_mutex_lock(&mMutex);
4519    mFlush = true;
4520    pthread_mutex_unlock(&mMutex);
4521
4522    rc = stopAllChannels();
4523    // unlink of dualcam
4524    if (mIsDeviceLinked) {
4525        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4526        pthread_mutex_lock(&gCamLock);
4527
4528        if (mIsMainCamera == 1) {
4529            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4530            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4531            // related session id should be session id of linked session
4532            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4533        } else {
4534            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4535            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4536            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4537        }
4538        pthread_mutex_unlock(&gCamLock);
4539
4540        rc = mCameraHandle->ops->sync_related_sensors(
4541                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4542        if (rc < 0) {
4543            LOGE("Dualcam: Unlink failed, but still proceed to close");
4544        }
4545    }
4546
4547    if (rc < 0) {
4548        LOGE("stopAllChannels failed");
4549        return rc;
4550    }
4551    if (mChannelHandle) {
4552        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4553                mChannelHandle);
4554    }
4555
4556    // Reset bundle info
4557    rc = setBundleInfo();
4558    if (rc < 0) {
4559        LOGE("setBundleInfo failed %d", rc);
4560        return rc;
4561    }
4562
4563    // Mutex Lock
4564    pthread_mutex_lock(&mMutex);
4565
4566    // Unblock process_capture_request
4567    mPendingLiveRequest = 0;
4568    pthread_cond_signal(&mRequestCond);
4569
4570    rc = notifyErrorForPendingRequests();
4571    if (rc < 0) {
4572        LOGE("notifyErrorForPendingRequests failed");
4573        pthread_mutex_unlock(&mMutex);
4574        return rc;
4575    }
4576
4577    mFlush = false;
4578
4579    // Start the Streams/Channels
4580    if (restartChannels) {
4581        rc = startAllChannels();
4582        if (rc < 0) {
4583            LOGE("startAllChannels failed");
4584            pthread_mutex_unlock(&mMutex);
4585            return rc;
4586        }
4587    }
4588
4589    if (mChannelHandle) {
4590        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4591                    mChannelHandle);
4592        if (rc < 0) {
4593            LOGE("start_channel failed");
4594            pthread_mutex_unlock(&mMutex);
4595            return rc;
4596        }
4597    }
4598
4599    pthread_mutex_unlock(&mMutex);
4600
4601    return 0;
4602}
4603
4604/*===========================================================================
4605 * FUNCTION   : flushPerf
4606 *
4607 * DESCRIPTION: This is the performance optimization version of flush that does
4608 *              not use stream off, rather flushes the system
4609 *
4610 * PARAMETERS :
4611 *
4612 *
4613 * RETURN     : 0 : success
4614 *              -EINVAL: input is malformed (device is not valid)
4615 *              -ENODEV: if the device has encountered a serious error
4616 *==========================================================================*/
4617int QCamera3HardwareInterface::flushPerf()
4618{
4619    ATRACE_CALL();
4620    int32_t rc = 0;
4621    struct timespec timeout;
4622    bool timed_wait = false;
4623
4624    pthread_mutex_lock(&mMutex);
4625    mFlushPerf = true;
4626    mPendingBuffersMap.numPendingBufsAtFlush =
4627        mPendingBuffersMap.get_num_overall_buffers();
4628    LOGD("Calling flush. Wait for %d buffers to return",
4629        mPendingBuffersMap.numPendingBufsAtFlush);
4630
4631    /* send the flush event to the backend */
4632    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4633    if (rc < 0) {
4634        LOGE("Error in flush: IOCTL failure");
4635        mFlushPerf = false;
4636        pthread_mutex_unlock(&mMutex);
4637        return -ENODEV;
4638    }
4639
4640    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
4641        LOGD("No pending buffers in HAL, return flush");
4642        mFlushPerf = false;
4643        pthread_mutex_unlock(&mMutex);
4644        return rc;
4645    }
4646
4647    /* wait on a signal that buffers were received */
4648    rc = clock_gettime(CLOCK_REALTIME, &timeout);
4649    if (rc < 0) {
4650        LOGE("Error reading the real time clock, cannot use timed wait");
4651    } else {
4652        timeout.tv_sec += FLUSH_TIMEOUT;
4653        timed_wait = true;
4654    }
4655
4656    //Block on conditional variable
4657    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
4658        LOGD("Waiting on mBuffersCond");
4659        if (!timed_wait) {
4660            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4661            if (rc != 0) {
4662                 LOGE("pthread_cond_wait failed due to rc = %s",
4663                        strerror(rc));
4664                 break;
4665            }
4666        } else {
4667            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4668            if (rc != 0) {
4669                LOGE("pthread_cond_timedwait failed due to rc = %s",
4670                            strerror(rc));
4671                break;
4672            }
4673        }
4674    }
4675    if (rc != 0) {
4676        mFlushPerf = false;
4677        pthread_mutex_unlock(&mMutex);
4678        return -ENODEV;
4679    }
4680
4681    LOGD("Received buffers, now safe to return them");
4682
4683    //make sure the channels handle flush
4684    //currently only required for the picture channel to release snapshot resources
4685    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4686            it != mStreamInfo.end(); it++) {
4687        QCamera3Channel *channel = (*it)->channel;
4688        if (channel) {
4689            rc = channel->flush();
4690            if (rc) {
4691               LOGE("Flushing the channels failed with error %d", rc);
4692               // even though the channel flush failed we need to continue and
4693               // return the buffers we have to the framework, however the return
4694               // value will be an error
4695               rc = -ENODEV;
4696            }
4697        }
4698    }
4699
4700    /* notify the frameworks and send errored results */
4701    rc = notifyErrorForPendingRequests();
4702    if (rc < 0) {
4703        LOGE("notifyErrorForPendingRequests failed");
4704        pthread_mutex_unlock(&mMutex);
4705        return rc;
4706    }
4707
4708    //unblock process_capture_request
4709    mPendingLiveRequest = 0;
4710    unblockRequestIfNecessary();
4711
4712    mFlushPerf = false;
4713    pthread_mutex_unlock(&mMutex);
4714    LOGD ("Flush Operation complete. rc = %d", rc);
4715    return rc;
4716}
4717
4718/*===========================================================================
4719 * FUNCTION   : handleCameraDeviceError
4720 *
4721 * DESCRIPTION: This function calls internal flush and notifies the error to
4722 *              framework and updates the state variable.
4723 *
4724 * PARAMETERS : None
4725 *
4726 * RETURN     : NO_ERROR on Success
4727 *              Error code on failure
4728 *==========================================================================*/
4729int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4730{
4731    int32_t rc = NO_ERROR;
4732
4733    pthread_mutex_lock(&mMutex);
4734    if (mState != ERROR) {
4735        //if mState != ERROR, nothing to be done
4736        pthread_mutex_unlock(&mMutex);
4737        return NO_ERROR;
4738    }
4739    pthread_mutex_unlock(&mMutex);
4740
4741    rc = flush(false /* restart channels */);
4742    if (NO_ERROR != rc) {
4743        LOGE("internal flush to handle mState = ERROR failed");
4744    }
4745
4746    pthread_mutex_lock(&mMutex);
4747    mState = DEINIT;
4748    pthread_mutex_unlock(&mMutex);
4749
4750    camera3_notify_msg_t notify_msg;
4751    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4752    notify_msg.type = CAMERA3_MSG_ERROR;
4753    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4754    notify_msg.message.error.error_stream = NULL;
4755    notify_msg.message.error.frame_number = 0;
4756    mCallbackOps->notify(mCallbackOps, &notify_msg);
4757
4758    return rc;
4759}
4760
4761/*===========================================================================
4762 * FUNCTION   : captureResultCb
4763 *
4764 * DESCRIPTION: Callback handler for all capture result
4765 *              (streams, as well as metadata)
4766 *
4767 * PARAMETERS :
4768 *   @metadata : metadata information
4769 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4770 *               NULL if metadata.
4771 *
4772 * RETURN     : NONE
4773 *==========================================================================*/
4774void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4775                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4776{
4777    if (metadata_buf) {
4778        pthread_mutex_lock(&mMutex);
4779        uint8_t batchSize = mBatchSize;
4780        pthread_mutex_unlock(&mMutex);
4781        if (batchSize) {
4782            handleBatchMetadata(metadata_buf,
4783                    true /* free_and_bufdone_meta_buf */);
4784        } else { /* mBatchSize = 0 */
4785            hdrPlusPerfLock(metadata_buf);
4786            pthread_mutex_lock(&mMutex);
4787            handleMetadataWithLock(metadata_buf,
4788                    true /* free_and_bufdone_meta_buf */,
4789                    false /* first frame of batch metadata */ );
4790            pthread_mutex_unlock(&mMutex);
4791        }
4792    } else if (isInputBuffer) {
4793        pthread_mutex_lock(&mMutex);
4794        handleInputBufferWithLock(frame_number);
4795        pthread_mutex_unlock(&mMutex);
4796    } else {
4797        pthread_mutex_lock(&mMutex);
4798        handleBufferWithLock(buffer, frame_number);
4799        pthread_mutex_unlock(&mMutex);
4800    }
4801    return;
4802}
4803
4804/*===========================================================================
4805 * FUNCTION   : getReprocessibleOutputStreamId
4806 *
4807 * DESCRIPTION: Get source output stream id for the input reprocess stream
4808 *              based on size and format, which would be the largest
4809 *              output stream if an input stream exists.
4810 *
4811 * PARAMETERS :
4812 *   @id      : return the stream id if found
4813 *
4814 * RETURN     : int32_t type of status
4815 *              NO_ERROR  -- success
4816 *              none-zero failure code
4817 *==========================================================================*/
4818int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4819{
4820    /* check if any output or bidirectional stream with the same size and format
4821       and return that stream */
4822    if ((mInputStreamInfo.dim.width > 0) &&
4823            (mInputStreamInfo.dim.height > 0)) {
4824        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4825                it != mStreamInfo.end(); it++) {
4826
4827            camera3_stream_t *stream = (*it)->stream;
4828            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4829                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4830                    (stream->format == mInputStreamInfo.format)) {
4831                // Usage flag for an input stream and the source output stream
4832                // may be different.
4833                LOGD("Found reprocessible output stream! %p", *it);
4834                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4835                         stream->usage, mInputStreamInfo.usage);
4836
4837                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4838                if (channel != NULL && channel->mStreams[0]) {
4839                    id = channel->mStreams[0]->getMyServerID();
4840                    return NO_ERROR;
4841                }
4842            }
4843        }
4844    } else {
4845        LOGD("No input stream, so no reprocessible output stream");
4846    }
4847    return NAME_NOT_FOUND;
4848}
4849
4850/*===========================================================================
4851 * FUNCTION   : lookupFwkName
4852 *
4853 * DESCRIPTION: In case the enum is not same in fwk and backend
4854 *              make sure the parameter is correctly propogated
4855 *
4856 * PARAMETERS  :
4857 *   @arr      : map between the two enums
4858 *   @len      : len of the map
4859 *   @hal_name : name of the hal_parm to map
4860 *
4861 * RETURN     : int type of status
4862 *              fwk_name  -- success
4863 *              none-zero failure code
4864 *==========================================================================*/
4865template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4866        size_t len, halType hal_name)
4867{
4868
4869    for (size_t i = 0; i < len; i++) {
4870        if (arr[i].hal_name == hal_name) {
4871            return arr[i].fwk_name;
4872        }
4873    }
4874
4875    /* Not able to find matching framework type is not necessarily
4876     * an error case. This happens when mm-camera supports more attributes
4877     * than the frameworks do */
4878    LOGH("Cannot find matching framework type");
4879    return NAME_NOT_FOUND;
4880}
4881
4882/*===========================================================================
4883 * FUNCTION   : lookupHalName
4884 *
4885 * DESCRIPTION: In case the enum is not same in fwk and backend
4886 *              make sure the parameter is correctly propogated
4887 *
4888 * PARAMETERS  :
4889 *   @arr      : map between the two enums
4890 *   @len      : len of the map
4891 *   @fwk_name : name of the hal_parm to map
4892 *
4893 * RETURN     : int32_t type of status
4894 *              hal_name  -- success
4895 *              none-zero failure code
4896 *==========================================================================*/
4897template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4898        size_t len, fwkType fwk_name)
4899{
4900    for (size_t i = 0; i < len; i++) {
4901        if (arr[i].fwk_name == fwk_name) {
4902            return arr[i].hal_name;
4903        }
4904    }
4905
4906    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4907    return NAME_NOT_FOUND;
4908}
4909
4910/*===========================================================================
4911 * FUNCTION   : lookupProp
4912 *
4913 * DESCRIPTION: lookup a value by its name
4914 *
4915 * PARAMETERS :
4916 *   @arr     : map between the two enums
4917 *   @len     : size of the map
4918 *   @name    : name to be looked up
4919 *
4920 * RETURN     : Value if found
4921 *              CAM_CDS_MODE_MAX if not found
4922 *==========================================================================*/
4923template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4924        size_t len, const char *name)
4925{
4926    if (name) {
4927        for (size_t i = 0; i < len; i++) {
4928            if (!strcmp(arr[i].desc, name)) {
4929                return arr[i].val;
4930            }
4931        }
4932    }
4933    return CAM_CDS_MODE_MAX;
4934}
4935
4936/*===========================================================================
4937 *
4938 * DESCRIPTION:
4939 *
4940 * PARAMETERS :
4941 *   @metadata : metadata information from callback
4942 *   @timestamp: metadata buffer timestamp
4943 *   @request_id: request id
4944 *   @jpegMetadata: additional jpeg metadata
4945 *   @hybrid_ae_enable: whether hybrid ae is enabled
4946 *   // DevCamDebug metadata
4947 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
4948 *   // DevCamDebug metadata end
4949 *   @pprocDone: whether internal offline postprocsesing is done
4950 *
4951 * RETURN     : camera_metadata_t*
4952 *              metadata in a format specified by fwk
4953 *==========================================================================*/
4954camera_metadata_t*
4955QCamera3HardwareInterface::translateFromHalMetadata(
4956                                 metadata_buffer_t *metadata,
4957                                 nsecs_t timestamp,
4958                                 int32_t request_id,
4959                                 const CameraMetadata& jpegMetadata,
4960                                 uint8_t pipeline_depth,
4961                                 uint8_t capture_intent,
4962                                 uint8_t hybrid_ae_enable,
4963                                 /* DevCamDebug metadata translateFromHalMetadata argument */
4964                                 uint8_t DevCamDebug_meta_enable,
4965                                 /* DevCamDebug metadata end */
4966                                 bool pprocDone,
4967                                 uint8_t fwk_cacMode,
4968                                 bool firstMetadataInBatch)
4969{
4970    CameraMetadata camMetadata;
4971    camera_metadata_t *resultMetadata;
4972
4973    if (mBatchSize && !firstMetadataInBatch) {
4974        /* In batch mode, use cached metadata from the first metadata
4975            in the batch */
4976        camMetadata.clear();
4977        camMetadata = mCachedMetadata;
4978    }
4979
4980    if (jpegMetadata.entryCount())
4981        camMetadata.append(jpegMetadata);
4982
4983    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4984    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4985    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4986    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4987    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4988    if (mBatchSize == 0) {
4989        // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
4990        camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
4991    }
4992
4993    if (mBatchSize && !firstMetadataInBatch) {
4994        /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4995        resultMetadata = camMetadata.release();
4996        return resultMetadata;
4997    }
4998
4999    // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5000    // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5001    if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5002        // DevCamDebug metadata translateFromHalMetadata AF
5003        IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5004                CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5005            int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5006            camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5007        }
5008        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5009                CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5010            int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5011            camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5012        }
5013        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5014                CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5015            int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5016            camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5017        }
5018        IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5019                CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5020            int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5021            camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5022        }
// Translate vendor DevCamDebug AF/AEC/AWB debug values from the HAL metadata
// into the framework CameraMetadata (camMetadata). Each IF_META_AVAILABLE
// block copies exactly one HAL value (CAM_INTF_META_DEV_CAM_*) into its
// matching DEVCAMDEBUG_* vendor tag; values absent from `metadata` are skipped.
5023        IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5024                CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5025            int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5026            camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5027        }
5028        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5029                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5030            int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5031                *DevCamDebug_af_monitor_pdaf_target_pos;
5032            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5033                &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5034        }
5035        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5036                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5037            int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5038                *DevCamDebug_af_monitor_pdaf_confidence;
5039            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5040                &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5041        }
5042        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5043                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5044            int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5045            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5046                &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5047        }
5048        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5049                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5050            int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5051                *DevCamDebug_af_monitor_tof_target_pos;
5052            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5053                &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5054        }
5055        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5056                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5057            int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5058                *DevCamDebug_af_monitor_tof_confidence;
5059            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5060                &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5061        }
5062        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5063                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5064            int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5065            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5066                &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5067        }
5068        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5069                CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5070            int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5071            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5072                &fwk_DevCamDebug_af_monitor_type_select, 1);
5073        }
5074        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5075                CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5076            int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5077            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5078                &fwk_DevCamDebug_af_monitor_refocus, 1);
5079        }
5080        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5081                CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5082            int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5083            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5084                &fwk_DevCamDebug_af_monitor_target_pos, 1);
5085        }
5086        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5087                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5088            int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5089                *DevCamDebug_af_search_pdaf_target_pos;
5090            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5091                &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5092        }
5093        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5094                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5095            int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5096            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5097                &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5098        }
5099        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5100                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5101            int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5102            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5103                &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5104        }
5105        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5106                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5107            int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5108            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5109                &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5110        }
5111        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5112                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5113            int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5114            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5115                &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5116        }
5117        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5118                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5119            int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5120                *DevCamDebug_af_search_tof_target_pos;
5121            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5122                &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5123        }
5124        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5125                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5126            int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5127            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5128                &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5129        }
5130        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5131                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5132            int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5133            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5134                &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5135        }
5136        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5137                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5138            int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5139            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5140                &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5141        }
5142        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5143                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5144            int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5145            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5146                &fwk_DevCamDebug_af_search_tof_confidence, 1);
5147        }
5148        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5149                CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5150            int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5151            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5152                &fwk_DevCamDebug_af_search_type_select, 1);
5153        }
5154        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5155                CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5156            int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5157            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5158                &fwk_DevCamDebug_af_search_next_pos, 1);
5159        }
5160        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5161                CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5162            int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5163            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5164                &fwk_DevCamDebug_af_search_target_pos, 1);
5165        }
5166        // DevCamDebug metadata translateFromHalMetadata AEC
5167        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5168                CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5169            int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5170            camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
        // NOTE(review): this closing brace is mis-indented (4 spaces instead of
        // 8) relative to its IF_META_AVAILABLE block; cosmetic only.
5171    }
5172        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5173                CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5174            int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5175            camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5176        }
5177        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5178                CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5179            int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5180            camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5181        }
5182        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5183                CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5184            int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5185            camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5186        }
5187        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5188                CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5189            int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5190            camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5191        }
5192        IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5193                CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5194            float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5195            camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5196        }
5197        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5198                CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5199            int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5200            camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5201        }
5202        IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5203                CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5204            float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5205            camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5206        }
5207        // DevCamDebug metadata translateFromHalMetadata AWB
5208        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5209                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5210            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5211            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5212        }
5213        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5214                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5215            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5216            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5217        }
5218        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5219                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5220            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5221            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5222        }
5223        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5224                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5225            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5226            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5227        }
5228        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5229                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5230            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5231            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5232        }
    // Closes the enclosing scope (opened before this chunk) that gates the
    // whole DevCamDebug translation above — presumably a debug-enabled check;
    // confirm against the opening brace earlier in this function.
5233    }
5234    // atrace_end(ATRACE_TAG_ALWAYS);
5235
// Translate core per-frame results from HAL metadata into framework tags.
// Widen the HAL's uint32 frame number to the int64 ANDROID_SYNC_FRAME_NUMBER.
5236    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5237        int64_t fwk_frame_number = *frame_number;
5238        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5239    }
5240
    // FPS range: HAL reports floats; the framework tag takes an int32 pair
    // [min, max] (fractional parts are truncated by the casts).
5241    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5242        int32_t fps_range[2];
5243        fps_range[0] = (int32_t)float_range->min_fps;
5244        fps_range[1] = (int32_t)float_range->max_fps;
5245        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5246                                      fps_range, 2);
5247        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5248             fps_range[0], fps_range[1]);
5249    }
5250
5251    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5252        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5253    }
5254
    // Map the HAL bestshot mode to the framework scene mode.
    // NOTE(review): unlike the other lookupFwkName call sites in this function
    // (flash mode, face detect mode, effect mode), the int result is truncated
    // to uint8_t *before* the NAME_NOT_FOUND comparison. If NAME_NOT_FOUND is
    // a negative sentinel that does not survive the uint8_t round-trip, the
    // guard below can never fire and an unmapped mode would be published —
    // confirm NAME_NOT_FOUND's value and consider dropping the (uint8_t) cast.
5255    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5256        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5257                METADATA_MAP_SIZE(SCENE_MODES_MAP),
5258                *sceneMode);
5259        if (NAME_NOT_FOUND != val) {
5260            uint8_t fwkSceneMode = (uint8_t)val;
5261            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5262            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5263                     fwkSceneMode);
5264        }
5265    }
5266
    // AE/AWB lock flags: narrow the HAL uint32 to the framework's uint8 enum.
5267    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5268        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5269        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5270    }
5271
5272    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5273        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5274        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5275    }
5276
5277    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5278        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5279        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5280    }
5281
    // Only the edge_mode field of the HAL edge-application struct is reported.
5282    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5283            CAM_INTF_META_EDGE_MODE, metadata) {
5284        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5285    }
5286
5287    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5288        uint8_t fwk_flashPower = (uint8_t) *flashPower;
5289        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5290    }
5291
5292    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5293        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5294    }
5295
    // Flash state is published only when the HAL reported a non-negative
    // value; devices without a flash unit are forced to STATE_UNAVAILABLE so
    // the framework never sees a bogus READY/FIRED from them.
5296    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5297        if (0 <= *flashState) {
5298            uint8_t fwk_flashState = (uint8_t) *flashState;
5299            if (!gCamCapability[mCameraId]->flash_available) {
5300                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5301            }
5302            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5303        }
5304    }
5305
// Flash mode: translate the HAL enum through FLASH_MODES_MAP; unmapped
// values are silently dropped (NAME_NOT_FOUND guard).
5306    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5307        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5308        if (NAME_NOT_FOUND != val) {
5309            uint8_t fwk_flashMode = (uint8_t)val;
5310            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5311        }
5312    }
5313
5314    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5315        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5316        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5317    }
5318
    // Lens parameters pass straight through as floats.
5319    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5320        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5321    }
5322
5323    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5324        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5325    }
5326
5327    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5328        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5329    }
5330
5331    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5332        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5333        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5334    }
5335
    // Video stabilization: the else branch runs when the HAL did not report a
    // mode at all (IF_META_AVAILABLE expands to an if, so else == "absent").
5336    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5337        uint8_t fwk_videoStab = (uint8_t) *videoStab;
5338        LOGD("fwk_videoStab = %d", fwk_videoStab);
5339        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5340    } else {
5341        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5342        // and so hardcoding the Video Stab result to OFF mode.
5343        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5344        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
5345        LOGD("%s: EIS result default to OFF mode", __func__);
5346    }
5347
5348    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5349        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5350        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5351    }
5352
5353    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5354        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5355    }
5356
    // Applied dynamic black level: adjustBlackLevelForCFA presumably reorders
    // the per-channel values into RGGB order for the given color arrangement
    // (the LOGD below assumes RGGB order) — confirm against its definition.
    // The result is published twice: raw (vendor tag) and scaled to the
    // sensor's 10-bit raw depth (ANDROID tag).
5357    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5358        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5359        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
5360
5361        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
5362              gCamCapability[mCameraId]->color_arrangement);
5363
5364        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
5365          blackLevelAppliedPattern->cam_black_level[0],
5366          blackLevelAppliedPattern->cam_black_level[1],
5367          blackLevelAppliedPattern->cam_black_level[2],
5368          blackLevelAppliedPattern->cam_black_level[3]);
5369        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
5370                BLACK_LEVEL_PATTERN_CNT);
5371
5372        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
5373        // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
5374        // depth space.
5375        fwk_blackLevelInd[0] /= 4.0;
5376        fwk_blackLevelInd[1] /= 4.0;
5377        fwk_blackLevelInd[2] /= 4.0;
5378        fwk_blackLevelInd[3] /= 4.0;
5379        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
5380                BLACK_LEVEL_PATTERN_CNT);
5381    }
5382
5383    // Fixed whitelevel is used by ISP/Sensor
5384    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5385            &gCamCapability[mCameraId]->white_level, 1);
5386
    // Scaler crop region: translate from the sensor-output coordinate system
    // into the active-array coordinate system before publishing.
5387    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5388            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5389        int32_t scalerCropRegion[4];
5390        scalerCropRegion[0] = hScalerCropRegion->left;
5391        scalerCropRegion[1] = hScalerCropRegion->top;
5392        scalerCropRegion[2] = hScalerCropRegion->width;
5393        scalerCropRegion[3] = hScalerCropRegion->height;
5394
5395        // Adjust crop region from sensor output coordinate system to active
5396        // array coordinate system.
5397        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5398                scalerCropRegion[2], scalerCropRegion[3]);
5399
5400        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5401    }
5402
5403    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5404        LOGD("sensorExpTime = %lld", *sensorExpTime);
5405        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5406    }
5407
    // NOTE: "sensorFameDuration" (sic) — typo kept to match the HAL name used
    // throughout; value passes straight through.
5408    IF_META_AVAILABLE(int64_t, sensorFameDuration,
5409            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5410        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5411        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5412    }
5413
5414    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5415            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5416        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5417        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5418                sensorRollingShutterSkew, 1);
5419    }
5420
    // Sensitivity also drives the derived noise profile: one (S, O) model
    // pair per color channel, interleaved [S0, O0, S1, O1, ...].
5421    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5422        LOGD("sensorSensitivity = %d", *sensorSensitivity);
5423        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5424
5425        //calculate the noise profile based on sensitivity
5426        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5427        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // NOTE(review): runtime-sized stack array (VLA) — a compiler extension,
        // not standard C++; fine under the current toolchain but worth noting.
5428        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5429        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5430            noise_profile[i]   = noise_profile_S;
5431            noise_profile[i+1] = noise_profile_O;
5432        }
5433        LOGD("noise model entry (S, O) is (%f, %f)",
5434                noise_profile_S, noise_profile_O);
5435        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5436                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5437    }
5438
    // Post-RAW sensitivity boost: defaults to 100 (1.0x) when the HAL does
    // not report ISP sensitivity, then scaled by the post-stats factor if
    // present; always published.
5439    int32_t fwk_ispSensitivity = 100;
5440    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5441        fwk_ispSensitivity = (int32_t) *ispSensitivity;
5442    }
5443    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
5444        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
5445    }
5446    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5447
5448    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5449        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5450        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5451    }
5452
    // Face detection: publish the mode, then (when not OFF) translate the
    // detected faces — scores and rectangles always, ids and landmarks only
    // in FULL mode.
5453    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5454        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5455                *faceDetectMode);
5456        if (NAME_NOT_FOUND != val) {
5457            uint8_t fwk_faceDetectMode = (uint8_t)val;
5458            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5459
5460            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5461                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5462                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    // Clamp to MAX_ROI so the fixed-size arrays below can't
                    // be overrun.
5463                    uint8_t numFaces = MIN(
5464                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    // NOTE(review): faceIds is never filled per-face — it is
                    // only zeroed in the numFaces == 0 branch below, yet it is
                    // published via ANDROID_STATISTICS_FACE_IDS in the FULL
                    // branch, i.e. uninitialized whenever faces are present.
                    // Confirm whether face ids should be copied from
                    // faceDetectionInfo here.
5465                    int32_t faceIds[MAX_ROI];
5466                    uint8_t faceScores[MAX_ROI];
5467                    int32_t faceRectangles[MAX_ROI * 4];
5468                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j indexes faceRectangles (4 ints/face), k indexes
                    // faceLandmarks (6 ints/face).
5469                    size_t j = 0, k = 0;
5470
5471                    for (size_t i = 0; i < numFaces; i++) {
5472                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5473                        // Adjust crop region from sensor output coordinate system to active
5474                        // array coordinate system.
5475                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5476                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
5477                                rect.width, rect.height);
5478
5479                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5480                                faceRectangles+j, -1);
5481
5482                        j+= 4;
5483                    }
                    // numFaces is uint8_t, so "<= 0" is effectively "== 0":
                    // zero out the buffers when no faces were detected.
5484                    if (numFaces <= 0) {
5485                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5486                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5487                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5488                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5489                    }
5490
5491                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5492                            numFaces);
5493                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5494                            faceRectangles, numFaces * 4U);
                    // FULL mode additionally reports ids and landmarks, with
                    // landmark points remapped into active-array coordinates.
5495                    if (fwk_faceDetectMode ==
5496                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5497                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5498                                CAM_INTF_META_FACE_LANDMARK, metadata) {
5499
5500                            for (size_t i = 0; i < numFaces; i++) {
5501                                // Map the co-ordinate sensor output coordinate system to active
5502                                // array coordinate system.
5503                                mCropRegionMapper.toActiveArray(
5504                                        landmarks->face_landmarks[i].left_eye_center.x,
5505                                        landmarks->face_landmarks[i].left_eye_center.y);
5506                                mCropRegionMapper.toActiveArray(
5507                                        landmarks->face_landmarks[i].right_eye_center.x,
5508                                        landmarks->face_landmarks[i].right_eye_center.y);
5509                                mCropRegionMapper.toActiveArray(
5510                                        landmarks->face_landmarks[i].mouth_center.x,
5511                                        landmarks->face_landmarks[i].mouth_center.y);
5512
5513                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5514                                k+= 6;
5515                            }
5516                        }
5517
5518                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5519                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5520                                faceLandmarks, numFaces * 6U);
5521                   }
5522                }
5523            }
5524        }
5525    }
5526
5527    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5528        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5529        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5530    }
5531
5532    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5533            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5534        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5535        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5536    }
5537
    // Sharpness map is always published at the maximum grid size.
5538    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5539            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5540        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5541                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5542    }
5543
    // Lens shading map: size comes from the static capability, clamped to the
    // HAL maximums; 4 gain values (one per Bayer channel) per grid point.
5544    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5545            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5546        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5547                CAM_MAX_SHADING_MAP_HEIGHT);
5548        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5549                CAM_MAX_SHADING_MAP_WIDTH);
5550        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5551                lensShadingMap->lens_shading, 4U * map_width * map_height);
5552    }
5553
5554    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5555        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5556        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5557    }
5558
5559    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5560        //Populate CAM_INTF_META_TONEMAP_CURVES
5561        /* ch0 = G, ch 1 = B, ch 2 = R*/
        // Clamp a bad point count so the reads below stay in bounds. Note the
        // clamp writes through the `tonemap` pointer, mutating the HAL
        // metadata buffer in place.
5562        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5563            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5564                     tonemap->tonemap_points_cnt,
5565                    CAM_MAX_TONEMAP_CURVE_SIZE);
5566            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5567        }
5568
        // Each curve is (in, out) pairs, hence count * 2 floats per channel.
5569        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5570                        &tonemap->curves[0].tonemap_points[0][0],
5571                        tonemap->tonemap_points_cnt * 2);
5572
5573        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5574                        &tonemap->curves[1].tonemap_points[0][0],
5575                        tonemap->tonemap_points_cnt * 2);
5576
5577        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5578                        &tonemap->curves[2].tonemap_points[0][0],
5579                        tonemap->tonemap_points_cnt * 2);
5580    }
5581
5582    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5583            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5584        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5585                CC_GAINS_COUNT);
5586    }
5587
    // 3x3 color transform: the HAL matrix is reinterpreted as an array of
    // camera_metadata_rational_t (via void* to sidestep aliasing casts).
5588    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5589            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5590        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5591                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5592                CC_MATRIX_COLS * CC_MATRIX_ROWS);
5593    }
5594
    // Profile tone curve: same in-place clamp pattern as the tonemap curves
    // above — the HAL metadata buffer is mutated when the count is too large.
5595    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5596            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5597        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5598            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5599                     toneCurve->tonemap_points_cnt,
5600                    CAM_MAX_TONEMAP_CURVE_SIZE);
5601            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5602        }
5603        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5604                (float*)toneCurve->curve.tonemap_points,
5605                toneCurve->tonemap_points_cnt * 2);
5606    }
5607
5608    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5609            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5610        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5611                predColorCorrectionGains->gains, 4);
5612    }
5613
5614    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5615            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5616        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5617                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5618                CC_MATRIX_ROWS * CC_MATRIX_COLS);
5619    }
5620
    // OTP-calibrated Gr/Gb ratio maps to the framework's green-split tag.
5621    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5622        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5623    }
5624
5625    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5626        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5627        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5628    }
5629
5630    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5631        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5632        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5633    }
5634
5635    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5636        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5637                *effectMode);
5638        if (NAME_NOT_FOUND != val) {
5639            uint8_t fwk_effectMode = (uint8_t)val;
5640            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5641        }
5642    }
5643
5644    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5645            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5646        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5647                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5648        if (NAME_NOT_FOUND != fwk_testPatternMode) {
5649            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5650        }
5651        int32_t fwk_testPatternData[4];
5652        fwk_testPatternData[0] = testPatternData->r;
5653        fwk_testPatternData[3] = testPatternData->b;
5654        switch (gCamCapability[mCameraId]->color_arrangement) {
5655        case CAM_FILTER_ARRANGEMENT_RGGB:
5656        case CAM_FILTER_ARRANGEMENT_GRBG:
5657            fwk_testPatternData[1] = testPatternData->gr;
5658            fwk_testPatternData[2] = testPatternData->gb;
5659            break;
5660        case CAM_FILTER_ARRANGEMENT_GBRG:
5661        case CAM_FILTER_ARRANGEMENT_BGGR:
5662            fwk_testPatternData[2] = testPatternData->gr;
5663            fwk_testPatternData[1] = testPatternData->gb;
5664            break;
5665        default:
5666            LOGE("color arrangement %d is not supported",
5667                gCamCapability[mCameraId]->color_arrangement);
5668            break;
5669        }
5670        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5671    }
5672
5673    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5674        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5675    }
5676
5677    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5678        String8 str((const char *)gps_methods);
5679        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5680    }
5681
5682    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5683        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5684    }
5685
5686    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5687        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5688    }
5689
5690    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5691        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5692        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5693    }
5694
5695    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5696        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5697        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5698    }
5699
5700    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5701        int32_t fwk_thumb_size[2];
5702        fwk_thumb_size[0] = thumb_size->width;
5703        fwk_thumb_size[1] = thumb_size->height;
5704        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5705    }
5706
5707    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5708        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5709                privateData,
5710                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5711    }
5712
5713    if (metadata->is_tuning_params_valid) {
5714        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5715        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5716        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5717
5718
5719        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5720                sizeof(uint32_t));
5721        data += sizeof(uint32_t);
5722
5723        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5724                sizeof(uint32_t));
5725        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5726        data += sizeof(uint32_t);
5727
5728        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5729                sizeof(uint32_t));
5730        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5731        data += sizeof(uint32_t);
5732
5733        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5734                sizeof(uint32_t));
5735        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5736        data += sizeof(uint32_t);
5737
5738        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5739                sizeof(uint32_t));
5740        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5741        data += sizeof(uint32_t);
5742
5743        metadata->tuning_params.tuning_mod3_data_size = 0;
5744        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5745                sizeof(uint32_t));
5746        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5747        data += sizeof(uint32_t);
5748
5749        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5750                TUNING_SENSOR_DATA_MAX);
5751        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5752                count);
5753        data += count;
5754
5755        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5756                TUNING_VFE_DATA_MAX);
5757        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5758                count);
5759        data += count;
5760
5761        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5762                TUNING_CPP_DATA_MAX);
5763        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5764                count);
5765        data += count;
5766
5767        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5768                TUNING_CAC_DATA_MAX);
5769        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5770                count);
5771        data += count;
5772
5773        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5774                (int32_t *)(void *)tuning_meta_data_blob,
5775                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5776    }
5777
5778    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5779            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5780        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5781                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5782                NEUTRAL_COL_POINTS);
5783    }
5784
5785    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5786        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5787        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5788    }
5789
5790    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5791        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5792        // Adjust crop region from sensor output coordinate system to active
5793        // array coordinate system.
5794        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5795                hAeRegions->rect.width, hAeRegions->rect.height);
5796
5797        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5798        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5799                REGIONS_TUPLE_COUNT);
5800        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5801                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5802                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5803                hAeRegions->rect.height);
5804    }
5805
5806    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5807        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5808        if (NAME_NOT_FOUND != val) {
5809            uint8_t fwkAfMode = (uint8_t)val;
5810            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5811            LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5812        } else {
5813            LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5814                    val);
5815        }
5816    }
5817
5818    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5819        uint8_t fwk_afState = (uint8_t) *afState;
5820        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5821        LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5822    }
5823
5824    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5825        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5826    }
5827
5828    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5829        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5830    }
5831
5832    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5833        uint8_t fwk_lensState = *lensState;
5834        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5835    }
5836
5837    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5838        /*af regions*/
5839        int32_t afRegions[REGIONS_TUPLE_COUNT];
5840        // Adjust crop region from sensor output coordinate system to active
5841        // array coordinate system.
5842        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5843                hAfRegions->rect.width, hAfRegions->rect.height);
5844
5845        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5846        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5847                REGIONS_TUPLE_COUNT);
5848        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5849                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5850                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5851                hAfRegions->rect.height);
5852    }
5853
5854    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5855        uint32_t ab_mode = *hal_ab_mode;
5856        if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
5857                ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
5858              ab_mode = CAM_ANTIBANDING_MODE_AUTO;
5859        }
5860        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5861                ab_mode);
5862        if (NAME_NOT_FOUND != val) {
5863            uint8_t fwk_ab_mode = (uint8_t)val;
5864            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5865        }
5866    }
5867
5868    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5869        int val = lookupFwkName(SCENE_MODES_MAP,
5870                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5871        if (NAME_NOT_FOUND != val) {
5872            uint8_t fwkBestshotMode = (uint8_t)val;
5873            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5874            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5875        } else {
5876            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5877        }
5878    }
5879
5880    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5881         uint8_t fwk_mode = (uint8_t) *mode;
5882         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5883    }
5884
5885    /* Constant metadata values to be update*/
5886    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5887    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5888
5889    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5890    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5891
5892    int32_t hotPixelMap[2];
5893    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5894
5895    // CDS
5896    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5897        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5898    }
5899
5900    // TNR
5901    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5902        uint8_t tnr_enable       = tnr->denoise_enable;
5903        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5904
5905        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5906        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5907    }
5908
5909    // Reprocess crop data
5910    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5911        uint8_t cnt = crop_data->num_of_streams;
5912        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5913            // mm-qcamera-daemon only posts crop_data for streams
5914            // not linked to pproc. So no valid crop metadata is not
5915            // necessarily an error case.
5916            LOGD("No valid crop metadata entries");
5917        } else {
5918            uint32_t reproc_stream_id;
5919            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5920                LOGD("No reprocessible stream found, ignore crop data");
5921            } else {
5922                int rc = NO_ERROR;
5923                Vector<int32_t> roi_map;
5924                int32_t *crop = new int32_t[cnt*4];
5925                if (NULL == crop) {
5926                   rc = NO_MEMORY;
5927                }
5928                if (NO_ERROR == rc) {
5929                    int32_t streams_found = 0;
5930                    for (size_t i = 0; i < cnt; i++) {
5931                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5932                            if (pprocDone) {
5933                                // HAL already does internal reprocessing,
5934                                // either via reprocessing before JPEG encoding,
5935                                // or offline postprocessing for pproc bypass case.
5936                                crop[0] = 0;
5937                                crop[1] = 0;
5938                                crop[2] = mInputStreamInfo.dim.width;
5939                                crop[3] = mInputStreamInfo.dim.height;
5940                            } else {
5941                                crop[0] = crop_data->crop_info[i].crop.left;
5942                                crop[1] = crop_data->crop_info[i].crop.top;
5943                                crop[2] = crop_data->crop_info[i].crop.width;
5944                                crop[3] = crop_data->crop_info[i].crop.height;
5945                            }
5946                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5947                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5948                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5949                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5950                            streams_found++;
5951                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5952                                    crop[0], crop[1], crop[2], crop[3]);
5953                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5954                                    crop_data->crop_info[i].roi_map.left,
5955                                    crop_data->crop_info[i].roi_map.top,
5956                                    crop_data->crop_info[i].roi_map.width,
5957                                    crop_data->crop_info[i].roi_map.height);
5958                            break;
5959
5960                       }
5961                    }
5962                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5963                            &streams_found, 1);
5964                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5965                            crop, (size_t)(streams_found * 4));
5966                    if (roi_map.array()) {
5967                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5968                                roi_map.array(), roi_map.size());
5969                    }
5970               }
5971               if (crop) {
5972                   delete [] crop;
5973               }
5974            }
5975        }
5976    }
5977
5978    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5979        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5980        // so hardcoding the CAC result to OFF mode.
5981        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5982        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5983    } else {
5984        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5985            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5986                    *cacMode);
5987            if (NAME_NOT_FOUND != val) {
5988                uint8_t resultCacMode = (uint8_t)val;
5989                // check whether CAC result from CB is equal to Framework set CAC mode
5990                // If not equal then set the CAC mode came in corresponding request
5991                if (fwk_cacMode != resultCacMode) {
5992                    resultCacMode = fwk_cacMode;
5993                }
5994                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5995                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5996            } else {
5997                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5998            }
5999        }
6000    }
6001
6002    // Post blob of cam_cds_data through vendor tag.
6003    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6004        uint8_t cnt = cdsInfo->num_of_streams;
6005        cam_cds_data_t cdsDataOverride;
6006        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6007        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6008        cdsDataOverride.num_of_streams = 1;
6009        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6010            uint32_t reproc_stream_id;
6011            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6012                LOGD("No reprocessible stream found, ignore cds data");
6013            } else {
6014                for (size_t i = 0; i < cnt; i++) {
6015                    if (cdsInfo->cds_info[i].stream_id ==
6016                            reproc_stream_id) {
6017                        cdsDataOverride.cds_info[0].cds_enable =
6018                                cdsInfo->cds_info[i].cds_enable;
6019                        break;
6020                    }
6021                }
6022            }
6023        } else {
6024            LOGD("Invalid stream count %d in CDS_DATA", cnt);
6025        }
6026        camMetadata.update(QCAMERA3_CDS_INFO,
6027                (uint8_t *)&cdsDataOverride,
6028                sizeof(cam_cds_data_t));
6029    }
6030
6031    // Ldaf calibration data
6032    if (!mLdafCalibExist) {
6033        IF_META_AVAILABLE(uint32_t, ldafCalib,
6034                CAM_INTF_META_LDAF_EXIF, metadata) {
6035            mLdafCalibExist = true;
6036            mLdafCalib[0] = ldafCalib[0];
6037            mLdafCalib[1] = ldafCalib[1];
6038            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6039                    ldafCalib[0], ldafCalib[1]);
6040        }
6041    }
6042
6043    // AF scene change
6044    IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6045        camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6046    }
6047
6048    /* In batch mode, cache the first metadata in the batch */
6049    if (mBatchSize && firstMetadataInBatch) {
6050        mCachedMetadata.clear();
6051        mCachedMetadata = camMetadata;
6052    }
6053
6054    resultMetadata = camMetadata.release();
6055    return resultMetadata;
6056}
6057
6058/*===========================================================================
6059 * FUNCTION   : saveExifParams
6060 *
6061 * DESCRIPTION:
6062 *
6063 * PARAMETERS :
6064 *   @metadata : metadata information from callback
6065 *
6066 * RETURN     : none
6067 *
6068 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Copies each EXIF debug blob that is present in the HAL metadata buffer
    // into mExifParams.debug_params and sets the matching *_valid flag.
    // If debug_params was never allocated, every entry is silently skipped.

    // AE (auto-exposure) debug data
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto-white-balance) debug data
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto-focus) debug data
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto-scene-detection) debug data
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // Stats buffer debug data
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // BE stats debug data (presumably bayer-exposure stats -- confirm)
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer histogram (BHIST) debug data
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug data
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
6128
6129/*===========================================================================
6130 * FUNCTION   : get3AExifParams
6131 *
6132 * DESCRIPTION:
6133 *
6134 * PARAMETERS : none
6135 *
6136 *
6137 * RETURN     : mm_jpeg_exif_params_t
6138 *
6139 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns a by-value copy of the most recently cached EXIF parameters.
    return mExifParams;
}
6144
6145/*===========================================================================
6146 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
6147 *
6148 * DESCRIPTION:
6149 *
6150 * PARAMETERS :
6151 *   @metadata : metadata information from callback
6152 *
6153 * RETURN     : camera_metadata_t*
6154 *              metadata in a format specified by fwk
6155 *==========================================================================*/
6156camera_metadata_t*
6157QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
6158                                (metadata_buffer_t *metadata)
6159{
6160    CameraMetadata camMetadata;
6161    camera_metadata_t *resultMetadata;
6162
6163
6164    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
6165        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
6166        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
6167        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
6168    }
6169
6170    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
6171        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
6172                &aecTrigger->trigger, 1);
6173        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
6174                &aecTrigger->trigger_id, 1);
6175        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
6176                 aecTrigger->trigger);
6177        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
6178                aecTrigger->trigger_id);
6179    }
6180
6181    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
6182        uint8_t fwk_ae_state = (uint8_t) *ae_state;
6183        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
6184        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
6185    }
6186
6187    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
6188        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
6189                &af_trigger->trigger, 1);
6190        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
6191                 af_trigger->trigger);
6192        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
6193        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
6194                af_trigger->trigger_id);
6195    }
6196
6197    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
6198        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6199                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
6200        if (NAME_NOT_FOUND != val) {
6201            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
6202            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
6203            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
6204        } else {
6205            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
6206        }
6207    }
6208
6209    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
6210    uint32_t aeMode = CAM_AE_MODE_MAX;
6211    int32_t flashMode = CAM_FLASH_MODE_MAX;
6212    int32_t redeye = -1;
6213    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
6214        aeMode = *pAeMode;
6215    }
6216    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
6217        flashMode = *pFlashMode;
6218    }
6219    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
6220        redeye = *pRedeye;
6221    }
6222
6223    if (1 == redeye) {
6224        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
6225        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6226    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
6227        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
6228                flashMode);
6229        if (NAME_NOT_FOUND != val) {
6230            fwk_aeMode = (uint8_t)val;
6231            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6232        } else {
6233            LOGE("Unsupported flash mode %d", flashMode);
6234        }
6235    } else if (aeMode == CAM_AE_MODE_ON) {
6236        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
6237        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6238    } else if (aeMode == CAM_AE_MODE_OFF) {
6239        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
6240        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6241    } else {
6242        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
6243              "flashMode:%d, aeMode:%u!!!",
6244                 redeye, flashMode, aeMode);
6245    }
6246
6247    resultMetadata = camMetadata.release();
6248    return resultMetadata;
6249}
6250
6251/*===========================================================================
6252 * FUNCTION   : dumpMetadataToFile
6253 *
6254 * DESCRIPTION: Dumps tuning metadata to file system
6255 *
6256 * PARAMETERS :
6257 *   @meta           : tuning metadata
6258 *   @dumpFrameCount : current dump frame count
6259 *   @enabled        : Enable mask
6260 *
6261 *==========================================================================*/
6262void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6263                                                   uint32_t &dumpFrameCount,
6264                                                   bool enabled,
6265                                                   const char *type,
6266                                                   uint32_t frameNumber)
6267{
6268    //Some sanity checks
6269    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6270        LOGE("Tuning sensor data size bigger than expected %d: %d",
6271              meta.tuning_sensor_data_size,
6272              TUNING_SENSOR_DATA_MAX);
6273        return;
6274    }
6275
6276    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6277        LOGE("Tuning VFE data size bigger than expected %d: %d",
6278              meta.tuning_vfe_data_size,
6279              TUNING_VFE_DATA_MAX);
6280        return;
6281    }
6282
6283    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6284        LOGE("Tuning CPP data size bigger than expected %d: %d",
6285              meta.tuning_cpp_data_size,
6286              TUNING_CPP_DATA_MAX);
6287        return;
6288    }
6289
6290    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6291        LOGE("Tuning CAC data size bigger than expected %d: %d",
6292              meta.tuning_cac_data_size,
6293              TUNING_CAC_DATA_MAX);
6294        return;
6295    }
6296    //
6297
6298    if(enabled){
6299        char timeBuf[FILENAME_MAX];
6300        char buf[FILENAME_MAX];
6301        memset(buf, 0, sizeof(buf));
6302        memset(timeBuf, 0, sizeof(timeBuf));
6303        time_t current_time;
6304        struct tm * timeinfo;
6305        time (&current_time);
6306        timeinfo = localtime (&current_time);
6307        if (timeinfo != NULL) {
6308            /* Consistent naming for Jpeg+meta+raw: meta name */
6309            strftime (timeBuf, sizeof(timeBuf),
6310                    QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6311            /* Consistent naming for Jpeg+meta+raw: meta name end*/
6312        }
6313        String8 filePath(timeBuf);
6314         /* Consistent naming for Jpeg+meta+raw */
6315        snprintf(buf,
6316                sizeof(buf),
6317                "%dm_%s_%d.bin",
6318                dumpFrameCount,
6319                type,
6320                frameNumber);
6321         /* Consistent naming for Jpeg+meta+raw end */
6322        filePath.append(buf);
6323        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6324        if (file_fd >= 0) {
6325            ssize_t written_len = 0;
6326            meta.tuning_data_version = TUNING_DATA_VERSION;
6327            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6328            written_len += write(file_fd, data, sizeof(uint32_t));
6329            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6330            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6331            written_len += write(file_fd, data, sizeof(uint32_t));
6332            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6333            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6334            written_len += write(file_fd, data, sizeof(uint32_t));
6335            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6336            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6337            written_len += write(file_fd, data, sizeof(uint32_t));
6338            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6339            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6340            written_len += write(file_fd, data, sizeof(uint32_t));
6341            meta.tuning_mod3_data_size = 0;
6342            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6343            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6344            written_len += write(file_fd, data, sizeof(uint32_t));
6345            size_t total_size = meta.tuning_sensor_data_size;
6346            data = (void *)((uint8_t *)&meta.data);
6347            written_len += write(file_fd, data, total_size);
6348            total_size = meta.tuning_vfe_data_size;
6349            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6350            written_len += write(file_fd, data, total_size);
6351            total_size = meta.tuning_cpp_data_size;
6352            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6353            written_len += write(file_fd, data, total_size);
6354            total_size = meta.tuning_cac_data_size;
6355            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6356            written_len += write(file_fd, data, total_size);
6357            close(file_fd);
6358        }else {
6359            LOGE("fail to open file for metadata dumping");
6360        }
6361    }
6362}
6363
6364/*===========================================================================
6365 * FUNCTION   : cleanAndSortStreamInfo
6366 *
6367 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6368 *              and sort them such that raw stream is at the end of the list
6369 *              This is a workaround for camera daemon constraint.
6370 *
6371 * PARAMETERS : None
6372 *
6373 *==========================================================================*/
6374void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6375{
6376    List<stream_info_t *> newStreamInfo;
6377
6378    /*clean up invalid streams*/
6379    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6380            it != mStreamInfo.end();) {
6381        if(((*it)->status) == INVALID){
6382            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6383            delete channel;
6384            free(*it);
6385            it = mStreamInfo.erase(it);
6386        } else {
6387            it++;
6388        }
6389    }
6390
6391    // Move preview/video/callback/snapshot streams into newList
6392    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6393            it != mStreamInfo.end();) {
6394        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6395                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6396                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6397            newStreamInfo.push_back(*it);
6398            it = mStreamInfo.erase(it);
6399        } else
6400            it++;
6401    }
6402    // Move raw streams into newList
6403    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6404            it != mStreamInfo.end();) {
6405        newStreamInfo.push_back(*it);
6406        it = mStreamInfo.erase(it);
6407    }
6408
6409    mStreamInfo = newStreamInfo;
6410}
6411
6412/*===========================================================================
6413 * FUNCTION   : extractJpegMetadata
6414 *
6415 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6416 *              JPEG metadata is cached in HAL, and return as part of capture
6417 *              result when metadata is returned from camera daemon.
6418 *
6419 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6420 *              @request:      capture request
6421 *
6422 *==========================================================================*/
6423void QCamera3HardwareInterface::extractJpegMetadata(
6424        CameraMetadata& jpegMetadata,
6425        const camera3_capture_request_t *request)
6426{
6427    CameraMetadata frame_settings;
6428    frame_settings = request->settings;
6429
6430    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6431        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6432                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6433                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6434
6435    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6436        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6437                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6438                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6439
6440    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6441        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6442                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6443                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6444
6445    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6446        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6447                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6448                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6449
6450    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6451        jpegMetadata.update(ANDROID_JPEG_QUALITY,
6452                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6453                frame_settings.find(ANDROID_JPEG_QUALITY).count);
6454
6455    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6456        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6457                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6458                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6459
6460    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6461        int32_t thumbnail_size[2];
6462        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6463        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6464        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6465            int32_t orientation =
6466                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6467            if ((orientation == 90) || (orientation == 270)) {
6468               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6469               int32_t temp;
6470               temp = thumbnail_size[0];
6471               thumbnail_size[0] = thumbnail_size[1];
6472               thumbnail_size[1] = temp;
6473            }
6474         }
6475         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6476                thumbnail_size,
6477                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6478    }
6479
6480}
6481
6482/*===========================================================================
6483 * FUNCTION   : convertToRegions
6484 *
6485 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6486 *
6487 * PARAMETERS :
6488 *   @rect   : cam_rect_t struct to convert
6489 *   @region : int32_t destination array
6490 *   @weight : if we are converting from cam_area_t, weight is valid
6491 *             else weight = -1
6492 *
6493 *==========================================================================*/
6494void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6495        int32_t *region, int weight)
6496{
6497    region[0] = rect.left;
6498    region[1] = rect.top;
6499    region[2] = rect.left + rect.width;
6500    region[3] = rect.top + rect.height;
6501    if (weight > -1) {
6502        region[4] = weight;
6503    }
6504}
6505
6506/*===========================================================================
6507 * FUNCTION   : convertFromRegions
6508 *
6509 * DESCRIPTION: helper method to convert from array to cam_rect_t
6510 *
6511 * PARAMETERS :
6512 *   @rect   : cam_rect_t struct to convert
6513 *   @region : int32_t destination array
6514 *   @weight : if we are converting from cam_area_t, weight is valid
6515 *             else weight = -1
6516 *
6517 *==========================================================================*/
6518void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6519        const camera_metadata_t *settings, uint32_t tag)
6520{
6521    CameraMetadata frame_settings;
6522    frame_settings = settings;
6523    int32_t x_min = frame_settings.find(tag).data.i32[0];
6524    int32_t y_min = frame_settings.find(tag).data.i32[1];
6525    int32_t x_max = frame_settings.find(tag).data.i32[2];
6526    int32_t y_max = frame_settings.find(tag).data.i32[3];
6527    roi.weight = frame_settings.find(tag).data.i32[4];
6528    roi.rect.left = x_min;
6529    roi.rect.top = y_min;
6530    roi.rect.width = x_max - x_min;
6531    roi.rect.height = y_max - y_min;
6532}
6533
6534/*===========================================================================
6535 * FUNCTION   : resetIfNeededROI
6536 *
6537 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6538 *              crop region
6539 *
6540 * PARAMETERS :
6541 *   @roi       : cam_area_t struct to resize
6542 *   @scalerCropRegion : cam_crop_region_t region to compare against
6543 *
6544 *
6545 *==========================================================================*/
6546bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6547                                                 const cam_crop_region_t* scalerCropRegion)
6548{
6549    int32_t roi_x_max = roi->rect.width + roi->rect.left;
6550    int32_t roi_y_max = roi->rect.height + roi->rect.top;
6551    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6552    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6553
6554    /* According to spec weight = 0 is used to indicate roi needs to be disabled
6555     * without having this check the calculations below to validate if the roi
6556     * is inside scalar crop region will fail resulting in the roi not being
6557     * reset causing algorithm to continue to use stale roi window
6558     */
6559    if (roi->weight == 0) {
6560        return true;
6561    }
6562
6563    if ((roi_x_max < scalerCropRegion->left) ||
6564        // right edge of roi window is left of scalar crop's left edge
6565        (roi_y_max < scalerCropRegion->top)  ||
6566        // bottom edge of roi window is above scalar crop's top edge
6567        (roi->rect.left > crop_x_max) ||
6568        // left edge of roi window is beyond(right) of scalar crop's right edge
6569        (roi->rect.top > crop_y_max)){
6570        // top edge of roi windo is above scalar crop's top edge
6571        return false;
6572    }
6573    if (roi->rect.left < scalerCropRegion->left) {
6574        roi->rect.left = scalerCropRegion->left;
6575    }
6576    if (roi->rect.top < scalerCropRegion->top) {
6577        roi->rect.top = scalerCropRegion->top;
6578    }
6579    if (roi_x_max > crop_x_max) {
6580        roi_x_max = crop_x_max;
6581    }
6582    if (roi_y_max > crop_y_max) {
6583        roi_y_max = crop_y_max;
6584    }
6585    roi->rect.width = roi_x_max - roi->rect.left;
6586    roi->rect.height = roi_y_max - roi->rect.top;
6587    return true;
6588}
6589
6590/*===========================================================================
6591 * FUNCTION   : convertLandmarks
6592 *
6593 * DESCRIPTION: helper method to extract the landmarks from face detection info
6594 *
6595 * PARAMETERS :
6596 *   @landmark_data : input landmark data to be converted
6597 *   @landmarks : int32_t destination array
6598 *
6599 *
6600 *==========================================================================*/
6601void QCamera3HardwareInterface::convertLandmarks(
6602        cam_face_landmarks_info_t landmark_data,
6603        int32_t *landmarks)
6604{
6605    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6606    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6607    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6608    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6609    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6610    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6611}
6612
6613#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6614/*===========================================================================
6615 * FUNCTION   : initCapabilities
6616 *
6617 * DESCRIPTION: initialize camera capabilities in static data struct
6618 *
6619 * PARAMETERS :
6620 *   @cameraId  : camera Id
6621 *
6622 * RETURN     : int32_t type of status
6623 *              NO_ERROR  -- success
6624 *              none-zero failure code
6625 *==========================================================================*/
int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
{
    // Opens the camera, maps a shared capability buffer, queries the
    // backend for its capabilities, and caches a copy in
    // gCamCapability[cameraId]. Cleanup uses a fall-through goto ladder:
    // each failure label undoes everything acquired before it, and the
    // success path deliberately falls through the whole ladder too.
    int rc = 0;
    mm_camera_vtbl_t *cameraHandle = NULL;
    QCamera3HeapMemory *capabilityHeap = NULL;

    rc = camera_open((uint8_t)cameraId, &cameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d", rc);
        goto open_failed;
    }
    if (!cameraHandle) {
        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
        goto open_failed;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        goto heap_creation_failed;
    }
    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
                                capabilityHeap->getFd(0),
                                sizeof(cam_capability_t),
                                capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        goto map_failed;
    }

    /* Query Capability */
    // The backend fills the mapped buffer in place; read it back below.
    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        goto query_failed;
    }
    // Copy into process-local storage so the capability outlives the
    // mapped heap buffer, which is torn down before returning.
    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (!gCamCapability[cameraId]) {
        LOGE("out of memory");
        goto query_failed;
    }
    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                        sizeof(cam_capability_t));

    // Zero the analysis padding offsets for every analysis-info slot.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info =
                &gCamCapability[cameraId]->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }
    rc = 0;

    // Cleanup ladder: success and failure paths both pass through here;
    // rc carries the outcome.
query_failed:
    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;
heap_creation_failed:
    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
    cameraHandle = NULL;
open_failed:
    return rc;
}
6702
6703/*==========================================================================
6704 * FUNCTION   : get3Aversion
6705 *
6706 * DESCRIPTION: get the Q3A S/W version
6707 *
6708 * PARAMETERS :
6709 *  @sw_version: Reference of Q3A structure which will hold version info upon
6710 *               return
6711 *
6712 * RETURN     : None
6713 *
6714 *==========================================================================*/
6715void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6716{
6717    if(gCamCapability[mCameraId])
6718        sw_version = gCamCapability[mCameraId]->q3a_version;
6719    else
6720        LOGE("Capability structure NULL!");
6721}
6722
6723
6724/*===========================================================================
6725 * FUNCTION   : initParameters
6726 *
6727 * DESCRIPTION: initialize camera parameters
6728 *
6729 * PARAMETERS :
6730 *
6731 * RETURN     : int32_t type of status
6732 *              NO_ERROR  -- success
6733 *              none-zero failure code
6734 *==========================================================================*/
6735int QCamera3HardwareInterface::initParameters()
6736{
6737    int rc = 0;
6738
6739    //Allocate Set Param Buffer
6740    mParamHeap = new QCamera3HeapMemory(1);
6741    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6742    if(rc != OK) {
6743        rc = NO_MEMORY;
6744        LOGE("Failed to allocate SETPARM Heap memory");
6745        delete mParamHeap;
6746        mParamHeap = NULL;
6747        return rc;
6748    }
6749
6750    //Map memory for parameters buffer
6751    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6752            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6753            mParamHeap->getFd(0),
6754            sizeof(metadata_buffer_t),
6755            (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6756    if(rc < 0) {
6757        LOGE("failed to map SETPARM buffer");
6758        rc = FAILED_TRANSACTION;
6759        mParamHeap->deallocate();
6760        delete mParamHeap;
6761        mParamHeap = NULL;
6762        return rc;
6763    }
6764
6765    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6766
6767    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6768    return rc;
6769}
6770
6771/*===========================================================================
6772 * FUNCTION   : deinitParameters
6773 *
6774 * DESCRIPTION: de-initialize camera parameters
6775 *
6776 * PARAMETERS :
6777 *
6778 * RETURN     : NONE
6779 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Releases everything acquired in initParameters(), in reverse order:
    // unmap the SETPARM buffer from the camera backend first, then free
    // the heap memory that backed it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's buffer and is now invalid.
    mParameters = NULL;

    // mPrevParameters was malloc'd separately in initParameters().
    free(mPrevParameters);
    mPrevParameters = NULL;
}
6794
6795/*===========================================================================
6796 * FUNCTION   : calcMaxJpegSize
6797 *
6798 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6799 *
6800 * PARAMETERS :
6801 *
6802 * RETURN     : max_jpeg_size
6803 *==========================================================================*/
6804size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6805{
6806    size_t max_jpeg_size = 0;
6807    size_t temp_width, temp_height;
6808    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6809            MAX_SIZES_CNT);
6810    for (size_t i = 0; i < count; i++) {
6811        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6812        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6813        if (temp_width * temp_height > max_jpeg_size ) {
6814            max_jpeg_size = temp_width * temp_height;
6815        }
6816    }
6817    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6818    return max_jpeg_size;
6819}
6820
6821/*===========================================================================
6822 * FUNCTION   : getMaxRawSize
6823 *
6824 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6825 *
6826 * PARAMETERS :
6827 *
6828 * RETURN     : Largest supported Raw Dimension
6829 *==========================================================================*/
6830cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6831{
6832    int max_width = 0;
6833    cam_dimension_t maxRawSize;
6834
6835    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6836    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6837        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6838            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6839            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6840        }
6841    }
6842    return maxRawSize;
6843}
6844
6845
6846/*===========================================================================
6847 * FUNCTION   : calcMaxJpegDim
6848 *
6849 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6850 *
6851 * PARAMETERS :
6852 *
6853 * RETURN     : max_jpeg_dim
6854 *==========================================================================*/
6855cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6856{
6857    cam_dimension_t max_jpeg_dim;
6858    cam_dimension_t curr_jpeg_dim;
6859    max_jpeg_dim.width = 0;
6860    max_jpeg_dim.height = 0;
6861    curr_jpeg_dim.width = 0;
6862    curr_jpeg_dim.height = 0;
6863    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6864        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6865        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6866        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6867            max_jpeg_dim.width * max_jpeg_dim.height ) {
6868            max_jpeg_dim.width = curr_jpeg_dim.width;
6869            max_jpeg_dim.height = curr_jpeg_dim.height;
6870        }
6871    }
6872    return max_jpeg_dim;
6873}
6874
6875/*===========================================================================
6876 * FUNCTION   : addStreamConfig
6877 *
6878 * DESCRIPTION: adds the stream configuration to the array
6879 *
6880 * PARAMETERS :
6881 * @available_stream_configs : pointer to stream configuration array
6882 * @scalar_format            : scalar format
6883 * @dim                      : configuration dimension
6884 * @config_type              : input or output configuration type
6885 *
6886 * RETURN     : NONE
6887 *==========================================================================*/
6888void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6889        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6890{
6891    available_stream_configs.add(scalar_format);
6892    available_stream_configs.add(dim.width);
6893    available_stream_configs.add(dim.height);
6894    available_stream_configs.add(config_type);
6895}
6896
6897/*===========================================================================
6898 * FUNCTION   : suppportBurstCapture
6899 *
6900 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6901 *
6902 * PARAMETERS :
6903 *   @cameraId  : camera Id
6904 *
6905 * RETURN     : true if camera supports BURST_CAPTURE
6906 *              false otherwise
6907 *==========================================================================*/
6908bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6909{
6910    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6911    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6912    const int32_t highResWidth = 3264;
6913    const int32_t highResHeight = 2448;
6914
6915    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6916        // Maximum resolution images cannot be captured at >= 10fps
6917        // -> not supporting BURST_CAPTURE
6918        return false;
6919    }
6920
6921    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6922        // Maximum resolution images can be captured at >= 20fps
6923        // --> supporting BURST_CAPTURE
6924        return true;
6925    }
6926
6927    // Find the smallest highRes resolution, or largest resolution if there is none
6928    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6929            MAX_SIZES_CNT);
6930    size_t highRes = 0;
6931    while ((highRes + 1 < totalCnt) &&
6932            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6933            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6934            highResWidth * highResHeight)) {
6935        highRes++;
6936    }
6937    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6938        return true;
6939    } else {
6940        return false;
6941    }
6942}
6943
6944/*===========================================================================
6945 * FUNCTION   : initStaticMetadata
6946 *
6947 * DESCRIPTION: initialize the static metadata
6948 *
6949 * PARAMETERS :
6950 *   @cameraId  : camera Id
6951 *
6952 * RETURN     : int32_t type of status
6953 *              0  -- success
6954 *              non-zero failure code
6955 *==========================================================================*/
6956int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6957{
6958    int rc = 0;
6959    CameraMetadata staticInfo;
6960    size_t count = 0;
6961    bool limitedDevice = false;
6962    char prop[PROPERTY_VALUE_MAX];
6963    bool supportBurst = false;
6964
6965    supportBurst = supportBurstCapture(cameraId);
6966
6967    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6968     * guaranteed or if min fps of max resolution is less than 20 fps, its
6969     * advertised as limited device*/
6970    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6971            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6972            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6973            !supportBurst;
6974
6975    uint8_t supportedHwLvl = limitedDevice ?
6976            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6977            // LEVEL_3 - This device will support level 3.
6978            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6979
6980    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6981            &supportedHwLvl, 1);
6982
6983    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6984    /*HAL 3 only*/
6985    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6986                    &gCamCapability[cameraId]->min_focus_distance, 1);
6987
6988    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6989                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6990
6991    /*should be using focal lengths but sensor doesn't provide that info now*/
6992    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6993                      &gCamCapability[cameraId]->focal_length,
6994                      1);
6995
6996    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6997            gCamCapability[cameraId]->apertures,
6998            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6999
7000    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7001            gCamCapability[cameraId]->filter_densities,
7002            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7003
7004
7005    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7006            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7007            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7008
    // --- Lens / sensor static metadata ---
    // (Interior of the static-metadata init function; staticInfo, cameraId,
    // count, prop, etc. are declared earlier in the enclosing function.)
    // Lens shading map size, clamped to the HAL's compile-time maximums.
    int32_t lens_shading_map_size[] = {
            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
                      lens_shading_map_size,
                      sizeof(lens_shading_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);

    // Base gain factor is published as a rational (numerator/denominator).
    camera_metadata_rational baseGainFactor = {
            gCamCapability[cameraId]->base_gain_factor.numerator,
            gCamCapability[cameraId]->base_gain_factor.denominator};
    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
                      &baseGainFactor, 1);

    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
            gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));

    // Active array rectangle as (left, top, width, height).
    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
            gCamCapability[cameraId]->active_array_size.top,
            gCamCapability[cameraId]->active_array_size.width,
            gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    // Black level pattern is re-ordered to match the advertised CFA layout
    // before being published.
    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
    adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
            gCamCapability[cameraId]->color_arrangement);
    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);

    // Optical black regions: defensively clamp the reported count before
    // copying into the fixed-size local buffer (4 int32 values per region).
    // hasBlackRegions is presumably consumed later in the enclosing function.
    bool hasBlackRegions = false;
    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
        LOGW("black_region_count: %d is bounded to %d",
            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
    }
    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
        }
        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
        hasBlackRegions = true;
    }
7070
    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
            &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    // Sensor timestamps are advertised as coming from the realtime-comparable
    // clock source (per the REALTIME tag value).
    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
            &timestampSource, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
            &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {
            gCamCapability[cameraId]->sharpness_map_size.width,
            gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);

    // Pixel formats exposed to the framework. This list is reused below to
    // build the stream-configuration and min-frame-duration tables.
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_RAW10,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    // Processed (picture) sizes flattened into the metadata array; makeTable
    // emits 2 int32 values (w, h) per entry, hence count * 2 below.
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
            count, MAX_SIZES_CNT, available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            available_processed_sizes, count * 2);

    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->raw_dim,
            count, MAX_SIZES_CNT, available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            available_raw_sizes, count * 2);

    // AE target FPS ranges, 2 int32 values (min, max) per range.
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
            count, MAX_SIZES_CNT, available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, count * 2);

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    // Video stabilization (EIS): OFF is always advertised; ON is added only
    // for the back camera and only when persist.camera.eis.enable is set.
    Vector<uint8_t> availableVstabModes;
    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    char eis_prop[PROPERTY_VALUE_MAX];
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "0");
    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
    if (facingBack && eis_prop_set) {
        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes.array(), availableVstabModes.size());
7147
    /*HAL 1 and HAL 3 common*/
    // Fixed max digital zoom shared with the HAL1 implementation.
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    // Max metering regions per 3A block: {AE, AWB, AF}. AWB regions are never
    // supported; AF regions are dropped when only one focus mode is reported
    // (i.e. fixed-focus modules).
    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
        max3aRegions[2] = 0; /* AF not supported */
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);

    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "1");
    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
    LOGD("Support face detection mode: %d",
             supportedFaceDetectMode);

    // Translate the property value into the advertised face-detect mode list;
    // any value outside 1..3 (including 0) leaves only OFF and zeroes the
    // max face count.
    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    Vector<uint8_t> availableFaceDetectModes;
    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
    if (supportedFaceDetectMode == 1) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
    } else if (supportedFaceDetectMode == 2) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else if (supportedFaceDetectMode == 3) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else {
        maxFaces = 0;
    }
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes.array(),
            availableFaceDetectModes.size());
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            (int32_t *)&maxFaces, 1);

    int32_t exposureCompensationRange[] = {
            gCamCapability[cameraId]->exposure_compensation_min,
            gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // available_thumbnail_sizes is declared earlier in the enclosing scope.
    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7202
    /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    // Each entry appended by addStreamConfig is a
    // (format, width, height, direction) tuple.
    Vector<int32_t> available_stream_configs;
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats use the sensor's raw dimension table.
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (BLOB) output uses the picture-size table.
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // YUV / implementation-defined: advertise all picture sizes as
            // outputs while tracking the largest size for the input config.
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* Book keep largest */
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                 addStreamConfig(available_stream_configs, scalar_formats[j],
                         largest_picture_size,
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    /* android.scaler.availableMinFrameDurations */
    // Entries are (format, width, height, min_duration) tuples. RAW formats
    // pull from the raw duration table; everything else (YUV/BLOB/opaque)
    // falls through to the picture duration table.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());
7295
    // Build the high-speed (HFR) video configuration list from the
    // capability table's HFR modes.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        // Map the enum HFR mode to a numeric frame rate; unknown/OFF modes
        // stay 0 and are filtered out by the MIN_FPS_FOR_BATCH_MODE check.
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                j < MAX_SIZES_CNT; j++) {
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
       }
    }
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // NOTE(review): relies on Vector::array() returning NULL for an empty
    // vector so the tag is skipped when no HFR config was added — confirm
    // against the Vector implementation in use.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
7371
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Map HAL effect enums to framework effect modes; unmapped entries are
    // silently dropped (NAME_NOT_FOUND).
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Scene modes: CAM_SCENE_MODE_OFF is excluded. supported_indexes records
    // the HAL-table index for each advertised mode so that the overrides list
    // below can be built in the same order.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // 3 override values (AE/AWB/AF — presumably; confirm makeOverridesList)
    // per advertised scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    // If nothing was advertised, fall back to the mandatory DISABLED entry.
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        //  If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);
7472
    // AF modes: translate HAL focus modes to framework values, dropping any
    // without a framework mapping.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    // Flash firing levels are copied through without translation.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // AE modes: HAL-reported modes plus the flash-dependent AE modes when a
    // flash unit is present (relies on AVAILABLE_TRUE being nonzero).
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Max simultaneous output streams, ordered {stalling, processed, raw}.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No controllable LEDs: publish the tag with a zero-length payload.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);
7566
    // Focus distance calibration: only published when the HAL value maps to a
    // framework enum.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Pipeline depth = in-flight requests plus the empty-pipeline and
    // frame-skip delays.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);

    // Device capabilities. BURST_CAPTURE is gated on supportBurst (computed
    // earlier in the enclosing function); CONSTRAINED_HIGH_SPEED_VIDEO is
    // advertised only when HFR configs were built above; RAW only for
    // non-YUV (bayer) sensors.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);
7642
    // Reprocessing input: a single input stream is supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    // Post-RAW sensitivity boost range comes from the ISP sensitivity range.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));

    // Fixed mode lists advertised for hot-pixel, shading, lens-shading-map,
    // edge, noise-reduction, and tonemap controls.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7709    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7710            available_tonemap_modes,
7711            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7712
7713    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7714    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7715            available_hot_pixel_map_modes,
7716            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7717
7718    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7719            gCamCapability[cameraId]->reference_illuminant1);
7720    if (NAME_NOT_FOUND != val) {
7721        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7722        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7723    }
7724
7725    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7726            gCamCapability[cameraId]->reference_illuminant2);
7727    if (NAME_NOT_FOUND != val) {
7728        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7729        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7730    }
7731
7732    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7733            (void *)gCamCapability[cameraId]->forward_matrix1,
7734            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7735
7736    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7737            (void *)gCamCapability[cameraId]->forward_matrix2,
7738            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7739
7740    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7741            (void *)gCamCapability[cameraId]->color_transform1,
7742            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7743
7744    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7745            (void *)gCamCapability[cameraId]->color_transform2,
7746            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7747
7748    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7749            (void *)gCamCapability[cameraId]->calibration_transform1,
7750            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7751
7752    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7753            (void *)gCamCapability[cameraId]->calibration_transform2,
7754            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7755
7756    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7757       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7758       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7759       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7760       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7761       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7762       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7763       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7764       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7765       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7766       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7767       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7768       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7769       ANDROID_JPEG_GPS_COORDINATES,
7770       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7771       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7772       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7773       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7774       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7775       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7776       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7777       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7778       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7779       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7780       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7781       ANDROID_STATISTICS_FACE_DETECT_MODE,
7782       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7783       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7784       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7785       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7786       /* DevCamDebug metadata request_keys_basic */
7787       DEVCAMDEBUG_META_ENABLE,
7788       /* DevCamDebug metadata end */
7789       };
7790
7791    size_t request_keys_cnt =
7792            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7793    Vector<int32_t> available_request_keys;
7794    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7795    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7796        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7797    }
7798
7799    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7800            available_request_keys.array(), available_request_keys.size());
7801
7802    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7803       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7804       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7805       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7806       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7807       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7808       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7809       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7810       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7811       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7812       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7813       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7814       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7815       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7816       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7817       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7818       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7819       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7820       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7821       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7822       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7823       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7824       ANDROID_STATISTICS_FACE_SCORES,
7825       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7826       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
7827       // DevCamDebug metadata result_keys_basic
7828       DEVCAMDEBUG_META_ENABLE,
7829       // DevCamDebug metadata result_keys AF
7830       DEVCAMDEBUG_AF_LENS_POSITION,
7831       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
7832       DEVCAMDEBUG_AF_TOF_DISTANCE,
7833       DEVCAMDEBUG_AF_LUMA,
7834       DEVCAMDEBUG_AF_HAF_STATE,
7835       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7836       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7837       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7838       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7839       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7840       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7841       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7842       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7843       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7844       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7845       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7846       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7847       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7848       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7849       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7850       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7851       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7852       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7853       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7854       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7855       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7856       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7857       // DevCamDebug metadata result_keys AEC
7858       DEVCAMDEBUG_AEC_TARGET_LUMA,
7859       DEVCAMDEBUG_AEC_COMP_LUMA,
7860       DEVCAMDEBUG_AEC_AVG_LUMA,
7861       DEVCAMDEBUG_AEC_CUR_LUMA,
7862       DEVCAMDEBUG_AEC_LINECOUNT,
7863       DEVCAMDEBUG_AEC_REAL_GAIN,
7864       DEVCAMDEBUG_AEC_EXP_INDEX,
7865       DEVCAMDEBUG_AEC_LUX_IDX,
7866       // DevCamDebug metadata result_keys AWB
7867       DEVCAMDEBUG_AWB_R_GAIN,
7868       DEVCAMDEBUG_AWB_G_GAIN,
7869       DEVCAMDEBUG_AWB_B_GAIN,
7870       DEVCAMDEBUG_AWB_CCT,
7871       DEVCAMDEBUG_AWB_DECISION,
7872       /* DevCamDebug metadata end */
7873       };
7874    size_t result_keys_cnt =
7875            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7876
7877    Vector<int32_t> available_result_keys;
7878    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7879    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7880        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7881    }
7882    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7883        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7884        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7885    }
7886    if (supportedFaceDetectMode == 1) {
7887        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7888        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7889    } else if ((supportedFaceDetectMode == 2) ||
7890            (supportedFaceDetectMode == 3)) {
7891        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7892        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7893    }
7894    if (hasBlackRegions) {
7895        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
7896        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
7897    }
7898    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7899            available_result_keys.array(), available_result_keys.size());
7900
7901    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7902       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7903       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7904       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7905       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7906       ANDROID_SCALER_CROPPING_TYPE,
7907       ANDROID_SYNC_MAX_LATENCY,
7908       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7909       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7910       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7911       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7912       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7913       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7914       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7915       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7916       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7917       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7918       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7919       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7920       ANDROID_LENS_FACING,
7921       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7922       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7923       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7924       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7925       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7926       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7927       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7928       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7929       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7930       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7931       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7932       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7933       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7934       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7935       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7936       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7937       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7938       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7939       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7940       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7941       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7942       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7943       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7944       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7945       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7946       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7947       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7948       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7949       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7950       ANDROID_CONTROL_AVAILABLE_MODES,
7951       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7952       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7953       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7954       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7955       ANDROID_SHADING_AVAILABLE_MODES,
7956       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7957       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7958
7959    Vector<int32_t> available_characteristics_keys;
7960    available_characteristics_keys.appendArray(characteristics_keys_basic,
7961            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7962    if (hasBlackRegions) {
7963        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7964    }
7965    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7966                      available_characteristics_keys.array(),
7967                      available_characteristics_keys.size());
7968
7969    /*available stall durations depend on the hw + sw and will be different for different devices */
7970    /*have to add for raw after implementation*/
7971    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7972    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7973
7974    Vector<int64_t> available_stall_durations;
7975    for (uint32_t j = 0; j < stall_formats_count; j++) {
7976        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7977            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7978                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7979                available_stall_durations.add(stall_formats[j]);
7980                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7981                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7982                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7983          }
7984        } else {
7985            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7986                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7987                available_stall_durations.add(stall_formats[j]);
7988                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7989                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7990                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7991            }
7992        }
7993    }
7994    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7995                      available_stall_durations.array(),
7996                      available_stall_durations.size());
7997
7998    //QCAMERA3_OPAQUE_RAW
7999    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8000    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8001    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8002    case LEGACY_RAW:
8003        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8004            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8005        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8006            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8007        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8008            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8009        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8010        break;
8011    case MIPI_RAW:
8012        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8013            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8014        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8015            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8016        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8017            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8018        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8019        break;
8020    default:
8021        LOGE("unknown opaque_raw_format %d",
8022                gCamCapability[cameraId]->opaque_raw_fmt);
8023        break;
8024    }
8025    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8026
8027    Vector<int32_t> strides;
8028    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8029            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8030        cam_stream_buf_plane_info_t buf_planes;
8031        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8032        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8033        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8034            &gCamCapability[cameraId]->padding_info, &buf_planes);
8035        strides.add(buf_planes.plane_info.mp[0].stride);
8036    }
8037    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8038            strides.size());
8039
8040    Vector<int32_t> opaque_size;
8041    for (size_t j = 0; j < scalar_formats_count; j++) {
8042        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8043            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8044                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8045                cam_stream_buf_plane_info_t buf_planes;
8046
8047                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8048                         &gCamCapability[cameraId]->padding_info, &buf_planes);
8049
8050                if (rc == 0) {
8051                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8052                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8053                    opaque_size.add(buf_planes.plane_info.frame_len);
8054                }else {
8055                    LOGE("raw frame calculation failed!");
8056                }
8057            }
8058        }
8059    }
8060
8061    if ((opaque_size.size() > 0) &&
8062            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8063        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8064    else
8065        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8066
8067    gStaticMetadata[cameraId] = staticInfo.release();
8068    return rc;
8069}
8070
8071/*===========================================================================
8072 * FUNCTION   : makeTable
8073 *
8074 * DESCRIPTION: make a table of sizes
8075 *
8076 * PARAMETERS :
8077 *
8078 *
8079 *==========================================================================*/
8080void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8081        size_t max_size, int32_t *sizeTable)
8082{
8083    size_t j = 0;
8084    if (size > max_size) {
8085       size = max_size;
8086    }
8087    for (size_t i = 0; i < size; i++) {
8088        sizeTable[j] = dimTable[i].width;
8089        sizeTable[j+1] = dimTable[i].height;
8090        j+=2;
8091    }
8092}
8093
8094/*===========================================================================
8095 * FUNCTION   : makeFPSTable
8096 *
8097 * DESCRIPTION: make a table of fps ranges
8098 *
8099 * PARAMETERS :
8100 *
8101 *==========================================================================*/
8102void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8103        size_t max_size, int32_t *fpsRangesTable)
8104{
8105    size_t j = 0;
8106    if (size > max_size) {
8107       size = max_size;
8108    }
8109    for (size_t i = 0; i < size; i++) {
8110        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8111        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8112        j+=2;
8113    }
8114}
8115
8116/*===========================================================================
8117 * FUNCTION   : makeOverridesList
8118 *
8119 * DESCRIPTION: make a list of scene mode overrides
8120 *
8121 * PARAMETERS :
8122 *
8123 *
8124 *==========================================================================*/
8125void QCamera3HardwareInterface::makeOverridesList(
8126        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
8127        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
8128{
8129    /*daemon will give a list of overrides for all scene modes.
8130      However we should send the fwk only the overrides for the scene modes
8131      supported by the framework*/
8132    size_t j = 0;
8133    if (size > max_size) {
8134       size = max_size;
8135    }
8136    size_t focus_count = CAM_FOCUS_MODE_MAX;
8137    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
8138            focus_count);
8139    for (size_t i = 0; i < size; i++) {
8140        bool supt = false;
8141        size_t index = supported_indexes[i];
8142        overridesList[j] = gCamCapability[camera_id]->flash_available ?
8143                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
8144        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8145                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8146                overridesTable[index].awb_mode);
8147        if (NAME_NOT_FOUND != val) {
8148            overridesList[j+1] = (uint8_t)val;
8149        }
8150        uint8_t focus_override = overridesTable[index].af_mode;
8151        for (size_t k = 0; k < focus_count; k++) {
8152           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
8153              supt = true;
8154              break;
8155           }
8156        }
8157        if (supt) {
8158            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8159                    focus_override);
8160            if (NAME_NOT_FOUND != val) {
8161                overridesList[j+2] = (uint8_t)val;
8162            }
8163        } else {
8164           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
8165        }
8166        j+=3;
8167    }
8168}
8169
8170/*===========================================================================
8171 * FUNCTION   : filterJpegSizes
8172 *
8173 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8174 *              could be downscaled to
8175 *
8176 * PARAMETERS :
8177 *
8178 * RETURN     : length of jpegSizes array
8179 *==========================================================================*/
8180
8181size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8182        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8183        uint8_t downscale_factor)
8184{
8185    if (0 == downscale_factor) {
8186        downscale_factor = 1;
8187    }
8188
8189    int32_t min_width = active_array_size.width / downscale_factor;
8190    int32_t min_height = active_array_size.height / downscale_factor;
8191    size_t jpegSizesCnt = 0;
8192    if (processedSizesCnt > maxCount) {
8193        processedSizesCnt = maxCount;
8194    }
8195    for (size_t i = 0; i < processedSizesCnt; i+=2) {
8196        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8197            jpegSizes[jpegSizesCnt] = processedSizes[i];
8198            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8199            jpegSizesCnt += 2;
8200        }
8201    }
8202    return jpegSizesCnt;
8203}
8204
8205/*===========================================================================
8206 * FUNCTION   : computeNoiseModelEntryS
8207 *
8208 * DESCRIPTION: function to map a given sensitivity to the S noise
8209 *              model parameters in the DNG noise model.
8210 *
8211 * PARAMETERS : sens : the sensor sensitivity
8212 *
 * RETURN     : S (sensor amplification) noise
8214 *
8215 *==========================================================================*/
8216double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8217    double s = gCamCapability[mCameraId]->gradient_S * sens +
8218            gCamCapability[mCameraId]->offset_S;
8219    return ((s < 0.0) ? 0.0 : s);
8220}
8221
8222/*===========================================================================
8223 * FUNCTION   : computeNoiseModelEntryO
8224 *
8225 * DESCRIPTION: function to map a given sensitivity to the O noise
8226 *              model parameters in the DNG noise model.
8227 *
8228 * PARAMETERS : sens : the sensor sensitivity
8229 *
 * RETURN     : O (sensor readout) noise
8231 *
8232 *==========================================================================*/
8233double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8234    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8235    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8236            1.0 : (1.0 * sens / max_analog_sens);
8237    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8238            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8239    return ((o < 0.0) ? 0.0 : o);
8240}
8241
8242/*===========================================================================
8243 * FUNCTION   : getSensorSensitivity
8244 *
8245 * DESCRIPTION: convert iso_mode to an integer value
8246 *
8247 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8248 *
 * RETURN     : sensitivity supported by sensor
8250 *
8251 *==========================================================================*/
8252int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8253{
8254    int32_t sensitivity;
8255
8256    switch (iso_mode) {
8257    case CAM_ISO_MODE_100:
8258        sensitivity = 100;
8259        break;
8260    case CAM_ISO_MODE_200:
8261        sensitivity = 200;
8262        break;
8263    case CAM_ISO_MODE_400:
8264        sensitivity = 400;
8265        break;
8266    case CAM_ISO_MODE_800:
8267        sensitivity = 800;
8268        break;
8269    case CAM_ISO_MODE_1600:
8270        sensitivity = 1600;
8271        break;
8272    default:
8273        sensitivity = -1;
8274        break;
8275    }
8276    return sensitivity;
8277}
8278
8279/*===========================================================================
8280 * FUNCTION   : getCamInfo
8281 *
8282 * DESCRIPTION: query camera capabilities
8283 *
8284 * PARAMETERS :
8285 *   @cameraId  : camera Id
8286 *   @info      : camera info struct to be filled in with camera capabilities
8287 *
8288 * RETURN     : int type of status
8289 *              NO_ERROR  -- success
 *              non-zero failure code
8291 *==========================================================================*/
8292int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
8293        struct camera_info *info)
8294{
8295    ATRACE_CALL();
8296    int rc = 0;
8297
8298    pthread_mutex_lock(&gCamLock);
8299    if (NULL == gCamCapability[cameraId]) {
8300        rc = initCapabilities(cameraId);
8301        if (rc < 0) {
8302            pthread_mutex_unlock(&gCamLock);
8303            return rc;
8304        }
8305    }
8306
8307    if (NULL == gStaticMetadata[cameraId]) {
8308        rc = initStaticMetadata(cameraId);
8309        if (rc < 0) {
8310            pthread_mutex_unlock(&gCamLock);
8311            return rc;
8312        }
8313    }
8314
8315    switch(gCamCapability[cameraId]->position) {
8316    case CAM_POSITION_BACK:
8317        info->facing = CAMERA_FACING_BACK;
8318        break;
8319
8320    case CAM_POSITION_FRONT:
8321        info->facing = CAMERA_FACING_FRONT;
8322        break;
8323
8324    default:
8325        LOGE("Unknown position type for camera id:%d", cameraId);
8326        rc = -1;
8327        break;
8328    }
8329
8330
8331    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
8332    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
8333    info->static_camera_characteristics = gStaticMetadata[cameraId];
8334
8335    //For now assume both cameras can operate independently.
8336    info->conflicting_devices = NULL;
8337    info->conflicting_devices_length = 0;
8338
8339    //resource cost is 100 * MIN(1.0, m/M),
8340    //where m is throughput requirement with maximum stream configuration
8341    //and M is CPP maximum throughput.
8342    float max_fps = 0.0;
8343    for (uint32_t i = 0;
8344            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
8345        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
8346            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
8347    }
8348    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
8349            gCamCapability[cameraId]->active_array_size.width *
8350            gCamCapability[cameraId]->active_array_size.height * max_fps /
8351            gCamCapability[cameraId]->max_pixel_bandwidth;
8352    info->resource_cost = 100 * MIN(1.0, ratio);
8353    LOGI("camera %d resource cost is %d", cameraId,
8354            info->resource_cost);
8355
8356    pthread_mutex_unlock(&gCamLock);
8357    return rc;
8358}
8359
8360/*===========================================================================
8361 * FUNCTION   : translateCapabilityToMetadata
8362 *
8363 * DESCRIPTION: translate the capability into camera_metadata_t
8364 *
8365 * PARAMETERS : type of the request
8366 *
8367 *
8368 * RETURN     : success: camera_metadata_t*
8369 *              failure: NULL
8370 *
8371 *==========================================================================*/
8372camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8373{
8374    if (mDefaultMetadata[type] != NULL) {
8375        return mDefaultMetadata[type];
8376    }
8377    //first time we are handling this request
8378    //fill up the metadata structure using the wrapper class
8379    CameraMetadata settings;
8380    //translate from cam_capability_t to camera_metadata_tag_t
8381    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8382    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8383    int32_t defaultRequestID = 0;
8384    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8385
8386    /* OIS disable */
8387    char ois_prop[PROPERTY_VALUE_MAX];
8388    memset(ois_prop, 0, sizeof(ois_prop));
8389    property_get("persist.camera.ois.disable", ois_prop, "0");
8390    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8391
8392    /* Force video to use OIS */
8393    char videoOisProp[PROPERTY_VALUE_MAX];
8394    memset(videoOisProp, 0, sizeof(videoOisProp));
8395    property_get("persist.camera.ois.video", videoOisProp, "1");
8396    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8397
8398    // EIS enable/disable
8399    char eis_prop[PROPERTY_VALUE_MAX];
8400    memset(eis_prop, 0, sizeof(eis_prop));
8401    property_get("persist.camera.eis.enable", eis_prop, "0");
8402    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8403
8404    // Hybrid AE enable/disable
8405    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8406    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8407    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8408    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8409
8410    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
8411    // This is a bit hacky. EIS is enabled only when the above setprop
8412    // is set to non-zero value and on back camera (for 2015 Nexus).
8413    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
8414    // configureStream is called before this function. In other words,
8415    // we cannot guarantee the app will call configureStream before
8416    // calling createDefaultRequest.
8417    const bool eisEnabled = facingBack && eis_prop_set;
8418
8419    uint8_t controlIntent = 0;
8420    uint8_t focusMode;
8421    uint8_t vsMode;
8422    uint8_t optStabMode;
8423    uint8_t cacMode;
8424    uint8_t edge_mode;
8425    uint8_t noise_red_mode;
8426    uint8_t tonemap_mode;
8427    bool highQualityModeEntryAvailable = FALSE;
8428    bool fastModeEntryAvailable = FALSE;
8429    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8430    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8431    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8432
8433    switch (type) {
8434      case CAMERA3_TEMPLATE_PREVIEW:
8435        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8436        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8437        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8438        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8439        edge_mode = ANDROID_EDGE_MODE_FAST;
8440        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8441        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8442        break;
8443      case CAMERA3_TEMPLATE_STILL_CAPTURE:
8444        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8445        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8446        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8447        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8448        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8449        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8450        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8451        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8452        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8453            if (gCamCapability[mCameraId]->aberration_modes[i] ==
8454                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8455                highQualityModeEntryAvailable = TRUE;
8456            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8457                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8458                fastModeEntryAvailable = TRUE;
8459            }
8460        }
8461        if (highQualityModeEntryAvailable) {
8462            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8463        } else if (fastModeEntryAvailable) {
8464            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8465        }
8466        if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8467            shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8468        }
8469        break;
8470      case CAMERA3_TEMPLATE_VIDEO_RECORD:
8471        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8472        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8473        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8474        if (eisEnabled) {
8475            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8476        }
8477        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8478        edge_mode = ANDROID_EDGE_MODE_FAST;
8479        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8480        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8481        if (forceVideoOis)
8482            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8483        break;
8484      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8485        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8486        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8487        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8488        if (eisEnabled) {
8489            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8490        }
8491        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8492        edge_mode = ANDROID_EDGE_MODE_FAST;
8493        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8494        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8495        if (forceVideoOis)
8496            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8497        break;
8498      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8499        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8500        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8501        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8502        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8503        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8504        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8505        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8506        break;
8507      case CAMERA3_TEMPLATE_MANUAL:
8508        edge_mode = ANDROID_EDGE_MODE_FAST;
8509        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8510        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8511        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8512        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8513        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8514        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8515        break;
8516      default:
8517        edge_mode = ANDROID_EDGE_MODE_FAST;
8518        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8519        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8520        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8521        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8522        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8523        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8524        break;
8525    }
8526    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8527    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8528    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8529    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8530        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8531    }
8532    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8533
8534    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8535            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8536        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8537    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8538            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8539            || ois_disable)
8540        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8541    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8542    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8543
8544    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8545            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8546
8547    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8548    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8549
8550    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8551    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8552
8553    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8554    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8555
8556    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8557    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8558
8559    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8560    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8561
8562    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8563    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8564
8565    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8566    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8567
8568    /*flash*/
8569    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8570    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8571
8572    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8573    settings.update(ANDROID_FLASH_FIRING_POWER,
8574            &flashFiringLevel, 1);
8575
8576    /* lens */
8577    float default_aperture = gCamCapability[mCameraId]->apertures[0];
8578    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8579
8580    if (gCamCapability[mCameraId]->filter_densities_count) {
8581        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8582        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8583                        gCamCapability[mCameraId]->filter_densities_count);
8584    }
8585
8586    float default_focal_length = gCamCapability[mCameraId]->focal_length;
8587    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8588
8589    if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8590        float default_focus_distance = 0;
8591        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8592    }
8593
8594    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8595    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8596
8597    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8598    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8599
8600    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8601    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8602
8603    /* face detection (default to OFF) */
8604    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8605    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8606
8607    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8608    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8609
8610    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8611    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8612
8613    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8614    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8615
8616
8617    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8618    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8619
8620    /* Exposure time(Update the Min Exposure Time)*/
8621    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8622    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8623
8624    /* frame duration */
8625    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8626    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8627
8628    /* sensitivity */
8629    static const int32_t default_sensitivity = 100;
8630    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8631    static const int32_t default_isp_sensitivity =
8632            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8633    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8634
8635    /*edge mode*/
8636    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8637
8638    /*noise reduction mode*/
8639    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8640
8641    /*color correction mode*/
8642    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8643    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8644
8645    /*transform matrix mode*/
8646    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8647
8648    int32_t scaler_crop_region[4];
8649    scaler_crop_region[0] = 0;
8650    scaler_crop_region[1] = 0;
8651    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8652    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8653    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8654
8655    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8656    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8657
8658    /*focus distance*/
8659    float focus_distance = 0.0;
8660    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8661
8662    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8663    /* Restrict default preview template to max 30 fps */
8664    float max_range = 0.0;
8665    float max_fixed_fps = 0.0;
8666    int32_t fps_range[2] = {0, 0};
8667    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8668            i++) {
8669        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8670                TEMPLATE_MAX_PREVIEW_FPS) {
8671            continue;
8672        }
8673        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8674            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8675        if (type == CAMERA3_TEMPLATE_PREVIEW ||
8676                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8677                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8678            if (range > max_range) {
8679                fps_range[0] =
8680                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8681                fps_range[1] =
8682                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8683                max_range = range;
8684            }
8685        } else {
8686            if (range < 0.01 && max_fixed_fps <
8687                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8688                fps_range[0] =
8689                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8690                fps_range[1] =
8691                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8692                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8693            }
8694        }
8695    }
8696    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8697
8698    /*precapture trigger*/
8699    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8700    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8701
8702    /*af trigger*/
8703    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8704    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8705
8706    /* ae & af regions */
8707    int32_t active_region[] = {
8708            gCamCapability[mCameraId]->active_array_size.left,
8709            gCamCapability[mCameraId]->active_array_size.top,
8710            gCamCapability[mCameraId]->active_array_size.left +
8711                    gCamCapability[mCameraId]->active_array_size.width,
8712            gCamCapability[mCameraId]->active_array_size.top +
8713                    gCamCapability[mCameraId]->active_array_size.height,
8714            0};
8715    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8716            sizeof(active_region) / sizeof(active_region[0]));
8717    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8718            sizeof(active_region) / sizeof(active_region[0]));
8719
8720    /* black level lock */
8721    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8722    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8723
8724    //special defaults for manual template
8725    if (type == CAMERA3_TEMPLATE_MANUAL) {
8726        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8727        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8728
8729        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8730        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8731
8732        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8733        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8734
8735        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8736        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8737
8738        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8739        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8740
8741        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8742        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8743    }
8744
8745
8746    /* TNR
8747     * We'll use this location to determine which modes TNR will be set.
8748     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8749     * This is not to be confused with linking on a per stream basis that decision
8750     * is still on per-session basis and will be handled as part of config stream
8751     */
8752    uint8_t tnr_enable = 0;
8753
8754    if (m_bTnrPreview || m_bTnrVideo) {
8755
8756        switch (type) {
8757            case CAMERA3_TEMPLATE_VIDEO_RECORD:
8758                    tnr_enable = 1;
8759                    break;
8760
8761            default:
8762                    tnr_enable = 0;
8763                    break;
8764        }
8765
8766        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8767        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8768        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8769
8770        LOGD("TNR:%d with process plate %d for template:%d",
8771                             tnr_enable, tnr_process_type, type);
8772    }
8773
8774    //Update Link tags to default
8775    int32_t sync_type = CAM_TYPE_STANDALONE;
8776    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8777
8778    int32_t is_main = 0; //this doesn't matter as app should overwrite
8779    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8780
8781    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8782
8783    /* CDS default */
8784    char prop[PROPERTY_VALUE_MAX];
8785    memset(prop, 0, sizeof(prop));
8786    property_get("persist.camera.CDS", prop, "Auto");
8787    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8788    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8789    if (CAM_CDS_MODE_MAX == cds_mode) {
8790        cds_mode = CAM_CDS_MODE_AUTO;
8791    }
8792
8793    /* Disabling CDS in templates which have TNR enabled*/
8794    if (tnr_enable)
8795        cds_mode = CAM_CDS_MODE_OFF;
8796
8797    int32_t mode = cds_mode;
8798    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8799
8800    /* hybrid ae */
8801    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8802
8803    mDefaultMetadata[type] = settings.release();
8804
8805    return mDefaultMetadata[type];
8806}
8807
8808/*===========================================================================
8809 * FUNCTION   : setFrameParameters
8810 *
8811 * DESCRIPTION: set parameters per frame as requested in the metadata from
8812 *              framework
8813 *
8814 * PARAMETERS :
8815 *   @request   : request that needs to be serviced
8816 *   @streamsArray : Stream ID of all the requested streams
8817 *   @blob_request: Whether this request is a blob request or not
8818 *
8819 * RETURN     : success: NO_ERROR
8820 *              failure:
8821 *==========================================================================*/
8822int QCamera3HardwareInterface::setFrameParameters(
8823                    camera3_capture_request_t *request,
8824                    cam_stream_ID_t streamsArray,
8825                    int blob_request,
8826                    uint32_t snapshotStreamId)
8827{
8828    /*translate from camera_metadata_t type to parm_type_t*/
8829    int rc = 0;
8830    int32_t hal_version = CAM_HAL_V3;
8831
8832    clear_metadata_buffer(mParameters);
8833    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8834        LOGE("Failed to set hal version in the parameters");
8835        return BAD_VALUE;
8836    }
8837
8838    /*we need to update the frame number in the parameters*/
8839    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8840            request->frame_number)) {
8841        LOGE("Failed to set the frame number in the parameters");
8842        return BAD_VALUE;
8843    }
8844
8845    /* Update stream id of all the requested buffers */
8846    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
8847        LOGE("Failed to set stream type mask in the parameters");
8848        return BAD_VALUE;
8849    }
8850
8851    if (mUpdateDebugLevel) {
8852        uint32_t dummyDebugLevel = 0;
8853        /* The value of dummyDebugLevel is irrelavent. On
8854         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8855        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8856                dummyDebugLevel)) {
8857            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8858            return BAD_VALUE;
8859        }
8860        mUpdateDebugLevel = false;
8861    }
8862
8863    if(request->settings != NULL){
8864        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8865        if (blob_request)
8866            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8867    }
8868
8869    return rc;
8870}
8871
8872/*===========================================================================
8873 * FUNCTION   : setReprocParameters
8874 *
8875 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8876 *              return it.
8877 *
8878 * PARAMETERS :
8879 *   @request   : request that needs to be serviced
8880 *
8881 * RETURN     : success: NO_ERROR
8882 *              failure:
8883 *==========================================================================*/
8884int32_t QCamera3HardwareInterface::setReprocParameters(
8885        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8886        uint32_t snapshotStreamId)
8887{
8888    /*translate from camera_metadata_t type to parm_type_t*/
8889    int rc = 0;
8890
8891    if (NULL == request->settings){
8892        LOGE("Reprocess settings cannot be NULL");
8893        return BAD_VALUE;
8894    }
8895
8896    if (NULL == reprocParam) {
8897        LOGE("Invalid reprocessing metadata buffer");
8898        return BAD_VALUE;
8899    }
8900    clear_metadata_buffer(reprocParam);
8901
8902    /*we need to update the frame number in the parameters*/
8903    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8904            request->frame_number)) {
8905        LOGE("Failed to set the frame number in the parameters");
8906        return BAD_VALUE;
8907    }
8908
8909    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8910    if (rc < 0) {
8911        LOGE("Failed to translate reproc request");
8912        return rc;
8913    }
8914
8915    CameraMetadata frame_settings;
8916    frame_settings = request->settings;
8917    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8918            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8919        int32_t *crop_count =
8920                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8921        int32_t *crop_data =
8922                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8923        int32_t *roi_map =
8924                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8925        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8926            cam_crop_data_t crop_meta;
8927            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8928            crop_meta.num_of_streams = 1;
8929            crop_meta.crop_info[0].crop.left   = crop_data[0];
8930            crop_meta.crop_info[0].crop.top    = crop_data[1];
8931            crop_meta.crop_info[0].crop.width  = crop_data[2];
8932            crop_meta.crop_info[0].crop.height = crop_data[3];
8933
8934            crop_meta.crop_info[0].roi_map.left =
8935                    roi_map[0];
8936            crop_meta.crop_info[0].roi_map.top =
8937                    roi_map[1];
8938            crop_meta.crop_info[0].roi_map.width =
8939                    roi_map[2];
8940            crop_meta.crop_info[0].roi_map.height =
8941                    roi_map[3];
8942
8943            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8944                rc = BAD_VALUE;
8945            }
8946            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8947                    request->input_buffer->stream,
8948                    crop_meta.crop_info[0].crop.left,
8949                    crop_meta.crop_info[0].crop.top,
8950                    crop_meta.crop_info[0].crop.width,
8951                    crop_meta.crop_info[0].crop.height);
8952            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8953                    request->input_buffer->stream,
8954                    crop_meta.crop_info[0].roi_map.left,
8955                    crop_meta.crop_info[0].roi_map.top,
8956                    crop_meta.crop_info[0].roi_map.width,
8957                    crop_meta.crop_info[0].roi_map.height);
8958            } else {
8959                LOGE("Invalid reprocess crop count %d!", *crop_count);
8960            }
8961    } else {
8962        LOGE("No crop data from matching output stream");
8963    }
8964
8965    /* These settings are not needed for regular requests so handle them specially for
8966       reprocess requests; information needed for EXIF tags */
8967    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8968        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8969                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8970        if (NAME_NOT_FOUND != val) {
8971            uint32_t flashMode = (uint32_t)val;
8972            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8973                rc = BAD_VALUE;
8974            }
8975        } else {
8976            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8977                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8978        }
8979    } else {
8980        LOGH("No flash mode in reprocess settings");
8981    }
8982
8983    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8984        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8985        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8986            rc = BAD_VALUE;
8987        }
8988    } else {
8989        LOGH("No flash state in reprocess settings");
8990    }
8991
8992    return rc;
8993}
8994
8995/*===========================================================================
8996 * FUNCTION   : saveRequestSettings
8997 *
8998 * DESCRIPTION: Add any settings that might have changed to the request settings
8999 *              and save the settings to be applied on the frame
9000 *
9001 * PARAMETERS :
9002 *   @jpegMetadata : the extracted and/or modified jpeg metadata
9003 *   @request      : request with initial settings
9004 *
9005 * RETURN     :
9006 * camera_metadata_t* : pointer to the saved request settings
9007 *==========================================================================*/
9008camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9009        const CameraMetadata &jpegMetadata,
9010        camera3_capture_request_t *request)
9011{
9012    camera_metadata_t *resultMetadata;
9013    CameraMetadata camMetadata;
9014    camMetadata = request->settings;
9015
9016    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9017        int32_t thumbnail_size[2];
9018        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9019        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9020        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9021                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9022    }
9023
9024    resultMetadata = camMetadata.release();
9025    return resultMetadata;
9026}
9027
9028/*===========================================================================
9029 * FUNCTION   : setHalFpsRange
9030 *
9031 * DESCRIPTION: set FPS range parameter
9032 *
9033 *
9034 * PARAMETERS :
9035 *   @settings    : Metadata from framework
9036 *   @hal_metadata: Metadata buffer
9037 *
9038 *
9039 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
9041 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // Note: the caller (translateToHalMetadata) only invokes this after
    // verifying ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists in the settings,
    // so find() is safe here without an exists() check.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Start with video fps mirroring the ae target range; constrained
    // high-speed mode overrides the minimums below.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset batch size; it is recomputed below only when HFR batching applies.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode the sensor runs at the max fps, so
        // pull both the preview and video minimums up to match it (see the
        // table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to the corresponding HAL HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to keep the derived preview
                // rate at PREVIEW_FPS_FOR_HFR, capped at the HW maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range itself.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9135
9136/*===========================================================================
9137 * FUNCTION   : translateToHalMetadata
9138 *
9139 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9140 *
9141 *
9142 * PARAMETERS :
9143 *   @request  : request sent from framework
9144 *
9145 *
9146 * RETURN     : success: NO_ERROR
 *              failure: non-zero error code (e.g. BAD_VALUE)
9148 *==========================================================================*/
9149int QCamera3HardwareInterface::translateToHalMetadata
9150                                  (const camera3_capture_request_t *request,
9151                                   metadata_buffer_t *hal_metadata,
9152                                   uint32_t snapshotStreamId)
9153{
9154    int rc = 0;
9155    CameraMetadata frame_settings;
9156    frame_settings = request->settings;
9157
9158    /* Do not change the order of the following list unless you know what you are
9159     * doing.
9160     * The order is laid out in such a way that parameters in the front of the table
9161     * may be used to override the parameters later in the table. Examples are:
9162     * 1. META_MODE should precede AEC/AWB/AF MODE
9163     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
9164     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
9165     * 4. Any mode should precede it's corresponding settings
9166     */
9167    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9168        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9169        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9170            rc = BAD_VALUE;
9171        }
9172        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9173        if (rc != NO_ERROR) {
9174            LOGE("extractSceneMode failed");
9175        }
9176    }
9177
9178    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9179        uint8_t fwk_aeMode =
9180            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9181        uint8_t aeMode;
9182        int32_t redeye;
9183
9184        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9185            aeMode = CAM_AE_MODE_OFF;
9186        } else {
9187            aeMode = CAM_AE_MODE_ON;
9188        }
9189        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9190            redeye = 1;
9191        } else {
9192            redeye = 0;
9193        }
9194
9195        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9196                fwk_aeMode);
9197        if (NAME_NOT_FOUND != val) {
9198            int32_t flashMode = (int32_t)val;
9199            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9200        }
9201
9202        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9203        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9204            rc = BAD_VALUE;
9205        }
9206    }
9207
9208    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9209        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9210        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9211                fwk_whiteLevel);
9212        if (NAME_NOT_FOUND != val) {
9213            uint8_t whiteLevel = (uint8_t)val;
9214            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9215                rc = BAD_VALUE;
9216            }
9217        }
9218    }
9219
9220    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9221        uint8_t fwk_cacMode =
9222                frame_settings.find(
9223                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9224        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9225                fwk_cacMode);
9226        if (NAME_NOT_FOUND != val) {
9227            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9228            bool entryAvailable = FALSE;
9229            // Check whether Frameworks set CAC mode is supported in device or not
9230            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9231                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9232                    entryAvailable = TRUE;
9233                    break;
9234                }
9235            }
9236            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9237            // If entry not found then set the device supported mode instead of frameworks mode i.e,
9238            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9239            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9240            if (entryAvailable == FALSE) {
9241                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9242                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9243                } else {
9244                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9245                        // High is not supported and so set the FAST as spec say's underlying
9246                        // device implementation can be the same for both modes.
9247                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9248                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9249                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9250                        // in order to avoid the fps drop due to high quality
9251                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9252                    } else {
9253                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9254                    }
9255                }
9256            }
9257            LOGD("Final cacMode is %d", cacMode);
9258            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9259                rc = BAD_VALUE;
9260            }
9261        } else {
9262            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9263        }
9264    }
9265
9266    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9267        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9268        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9269                fwk_focusMode);
9270        if (NAME_NOT_FOUND != val) {
9271            uint8_t focusMode = (uint8_t)val;
9272            LOGD("set focus mode %d", focusMode);
9273            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9274                rc = BAD_VALUE;
9275            }
9276        }
9277    }
9278
9279    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9280        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9281        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9282                focalDistance)) {
9283            rc = BAD_VALUE;
9284        }
9285    }
9286
9287    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9288        uint8_t fwk_antibandingMode =
9289                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9290        int val = lookupHalName(ANTIBANDING_MODES_MAP,
9291                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9292        if (NAME_NOT_FOUND != val) {
9293            uint32_t hal_antibandingMode = (uint32_t)val;
9294            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
9295                if (m60HzZone) {
9296                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
9297                } else {
9298                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
9299                }
9300            }
9301            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9302                    hal_antibandingMode)) {
9303                rc = BAD_VALUE;
9304            }
9305        }
9306    }
9307
9308    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9309        int32_t expCompensation = frame_settings.find(
9310                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9311        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9312            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9313        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9314            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9315        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9316                expCompensation)) {
9317            rc = BAD_VALUE;
9318        }
9319    }
9320
9321    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9322        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9323        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9324            rc = BAD_VALUE;
9325        }
9326    }
9327    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9328        rc = setHalFpsRange(frame_settings, hal_metadata);
9329        if (rc != NO_ERROR) {
9330            LOGE("setHalFpsRange failed");
9331        }
9332    }
9333
9334    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9335        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9336        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9337            rc = BAD_VALUE;
9338        }
9339    }
9340
9341    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9342        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9343        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9344                fwk_effectMode);
9345        if (NAME_NOT_FOUND != val) {
9346            uint8_t effectMode = (uint8_t)val;
9347            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9348                rc = BAD_VALUE;
9349            }
9350        }
9351    }
9352
9353    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9354        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9355        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9356                colorCorrectMode)) {
9357            rc = BAD_VALUE;
9358        }
9359    }
9360
9361    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9362        cam_color_correct_gains_t colorCorrectGains;
9363        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9364            colorCorrectGains.gains[i] =
9365                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9366        }
9367        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9368                colorCorrectGains)) {
9369            rc = BAD_VALUE;
9370        }
9371    }
9372
9373    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9374        cam_color_correct_matrix_t colorCorrectTransform;
9375        cam_rational_type_t transform_elem;
9376        size_t num = 0;
9377        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9378           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9379              transform_elem.numerator =
9380                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9381              transform_elem.denominator =
9382                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9383              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9384              num++;
9385           }
9386        }
9387        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9388                colorCorrectTransform)) {
9389            rc = BAD_VALUE;
9390        }
9391    }
9392
9393    cam_trigger_t aecTrigger;
9394    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9395    aecTrigger.trigger_id = -1;
9396    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9397        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9398        aecTrigger.trigger =
9399            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9400        aecTrigger.trigger_id =
9401            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9402        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9403                aecTrigger)) {
9404            rc = BAD_VALUE;
9405        }
9406        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9407                aecTrigger.trigger, aecTrigger.trigger_id);
9408    }
9409
9410    /*af_trigger must come with a trigger id*/
9411    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9412        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9413        cam_trigger_t af_trigger;
9414        af_trigger.trigger =
9415            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9416        af_trigger.trigger_id =
9417            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9418        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9419            rc = BAD_VALUE;
9420        }
9421        LOGD("AfTrigger: %d AfTriggerID: %d",
9422                af_trigger.trigger, af_trigger.trigger_id);
9423    }
9424
9425    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9426        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9427        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9428            rc = BAD_VALUE;
9429        }
9430    }
9431    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9432        cam_edge_application_t edge_application;
9433        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9434        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9435            edge_application.sharpness = 0;
9436        } else {
9437            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9438        }
9439        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9440            rc = BAD_VALUE;
9441        }
9442    }
9443
9444    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9445        int32_t respectFlashMode = 1;
9446        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9447            uint8_t fwk_aeMode =
9448                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9449            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9450                respectFlashMode = 0;
9451                LOGH("AE Mode controls flash, ignore android.flash.mode");
9452            }
9453        }
9454        if (respectFlashMode) {
9455            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9456                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9457            LOGH("flash mode after mapping %d", val);
9458            // To check: CAM_INTF_META_FLASH_MODE usage
9459            if (NAME_NOT_FOUND != val) {
9460                uint8_t flashMode = (uint8_t)val;
9461                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9462                    rc = BAD_VALUE;
9463                }
9464            }
9465        }
9466    }
9467
9468    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9469        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9470        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9471            rc = BAD_VALUE;
9472        }
9473    }
9474
9475    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9476        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9477        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9478                flashFiringTime)) {
9479            rc = BAD_VALUE;
9480        }
9481    }
9482
9483    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9484        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9485        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9486                hotPixelMode)) {
9487            rc = BAD_VALUE;
9488        }
9489    }
9490
9491    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9492        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9493        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9494                lensAperture)) {
9495            rc = BAD_VALUE;
9496        }
9497    }
9498
9499    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9500        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9501        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9502                filterDensity)) {
9503            rc = BAD_VALUE;
9504        }
9505    }
9506
9507    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9508        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9509        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9510                focalLength)) {
9511            rc = BAD_VALUE;
9512        }
9513    }
9514
9515    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9516        uint8_t optStabMode =
9517                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9518        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9519                optStabMode)) {
9520            rc = BAD_VALUE;
9521        }
9522    }
9523
9524    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9525        uint8_t videoStabMode =
9526                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9527        LOGD("videoStabMode from APP = %d", videoStabMode);
9528        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9529                videoStabMode)) {
9530            rc = BAD_VALUE;
9531        }
9532    }
9533
9534
9535    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9536        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9537        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9538                noiseRedMode)) {
9539            rc = BAD_VALUE;
9540        }
9541    }
9542
9543    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9544        float reprocessEffectiveExposureFactor =
9545            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9546        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9547                reprocessEffectiveExposureFactor)) {
9548            rc = BAD_VALUE;
9549        }
9550    }
9551
9552    cam_crop_region_t scalerCropRegion;
9553    bool scalerCropSet = false;
9554    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9555        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9556        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9557        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9558        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9559
9560        // Map coordinate system from active array to sensor output.
9561        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9562                scalerCropRegion.width, scalerCropRegion.height);
9563
9564        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9565                scalerCropRegion)) {
9566            rc = BAD_VALUE;
9567        }
9568        scalerCropSet = true;
9569    }
9570
9571    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9572        int64_t sensorExpTime =
9573                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9574        LOGD("setting sensorExpTime %lld", sensorExpTime);
9575        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9576                sensorExpTime)) {
9577            rc = BAD_VALUE;
9578        }
9579    }
9580
9581    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9582        int64_t sensorFrameDuration =
9583                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9584        int64_t minFrameDuration = getMinFrameDuration(request);
9585        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9586        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9587            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9588        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9589        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9590                sensorFrameDuration)) {
9591            rc = BAD_VALUE;
9592        }
9593    }
9594
9595    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9596        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9597        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9598                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9599        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9600                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9601        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9602        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9603                sensorSensitivity)) {
9604            rc = BAD_VALUE;
9605        }
9606    }
9607
9608    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9609        int32_t ispSensitivity =
9610            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9611        if (ispSensitivity <
9612            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9613                ispSensitivity =
9614                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9615                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9616        }
9617        if (ispSensitivity >
9618            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9619                ispSensitivity =
9620                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9621                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9622        }
9623        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9624                ispSensitivity)) {
9625            rc = BAD_VALUE;
9626        }
9627    }
9628
9629    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9630        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9631        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9632            rc = BAD_VALUE;
9633        }
9634    }
9635
9636    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9637        uint8_t fwk_facedetectMode =
9638                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9639
9640        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9641                fwk_facedetectMode);
9642
9643        if (NAME_NOT_FOUND != val) {
9644            uint8_t facedetectMode = (uint8_t)val;
9645            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9646                    facedetectMode)) {
9647                rc = BAD_VALUE;
9648            }
9649        }
9650    }
9651
9652    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9653        uint8_t histogramMode =
9654                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9655        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9656                histogramMode)) {
9657            rc = BAD_VALUE;
9658        }
9659    }
9660
9661    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9662        uint8_t sharpnessMapMode =
9663                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9664        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9665                sharpnessMapMode)) {
9666            rc = BAD_VALUE;
9667        }
9668    }
9669
9670    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9671        uint8_t tonemapMode =
9672                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9673        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9674            rc = BAD_VALUE;
9675        }
9676    }
9677    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9678    /*All tonemap channels will have the same number of points*/
9679    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9680        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9681        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9682        cam_rgb_tonemap_curves tonemapCurves;
9683        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9684        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9685            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9686                     tonemapCurves.tonemap_points_cnt,
9687                    CAM_MAX_TONEMAP_CURVE_SIZE);
9688            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9689        }
9690
9691        /* ch0 = G*/
9692        size_t point = 0;
9693        cam_tonemap_curve_t tonemapCurveGreen;
9694        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9695            for (size_t j = 0; j < 2; j++) {
9696               tonemapCurveGreen.tonemap_points[i][j] =
9697                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9698               point++;
9699            }
9700        }
9701        tonemapCurves.curves[0] = tonemapCurveGreen;
9702
9703        /* ch 1 = B */
9704        point = 0;
9705        cam_tonemap_curve_t tonemapCurveBlue;
9706        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9707            for (size_t j = 0; j < 2; j++) {
9708               tonemapCurveBlue.tonemap_points[i][j] =
9709                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9710               point++;
9711            }
9712        }
9713        tonemapCurves.curves[1] = tonemapCurveBlue;
9714
9715        /* ch 2 = R */
9716        point = 0;
9717        cam_tonemap_curve_t tonemapCurveRed;
9718        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9719            for (size_t j = 0; j < 2; j++) {
9720               tonemapCurveRed.tonemap_points[i][j] =
9721                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9722               point++;
9723            }
9724        }
9725        tonemapCurves.curves[2] = tonemapCurveRed;
9726
9727        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9728                tonemapCurves)) {
9729            rc = BAD_VALUE;
9730        }
9731    }
9732
9733    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9734        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9735        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9736                captureIntent)) {
9737            rc = BAD_VALUE;
9738        }
9739    }
9740
9741    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9742        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9743        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9744                blackLevelLock)) {
9745            rc = BAD_VALUE;
9746        }
9747    }
9748
9749    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9750        uint8_t lensShadingMapMode =
9751                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9752        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9753                lensShadingMapMode)) {
9754            rc = BAD_VALUE;
9755        }
9756    }
9757
9758    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9759        cam_area_t roi;
9760        bool reset = true;
9761        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9762
9763        // Map coordinate system from active array to sensor output.
9764        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9765                roi.rect.height);
9766
9767        if (scalerCropSet) {
9768            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9769        }
9770        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9771            rc = BAD_VALUE;
9772        }
9773    }
9774
9775    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9776        cam_area_t roi;
9777        bool reset = true;
9778        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9779
9780        // Map coordinate system from active array to sensor output.
9781        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9782                roi.rect.height);
9783
9784        if (scalerCropSet) {
9785            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9786        }
9787        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9788            rc = BAD_VALUE;
9789        }
9790    }
9791
9792    // CDS for non-HFR non-video mode
9793    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9794            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9795        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9796        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9797            LOGE("Invalid CDS mode %d!", *fwk_cds);
9798        } else {
9799            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9800                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9801                rc = BAD_VALUE;
9802            }
9803        }
9804    }
9805
9806    // TNR
9807    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9808        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9809        uint8_t b_TnrRequested = 0;
9810        cam_denoise_param_t tnr;
9811        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9812        tnr.process_plates =
9813            (cam_denoise_process_type_t)frame_settings.find(
9814            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9815        b_TnrRequested = tnr.denoise_enable;
9816        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9817            rc = BAD_VALUE;
9818        }
9819    }
9820
9821    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9822        int32_t fwk_testPatternMode =
9823                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9824        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9825                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9826
9827        if (NAME_NOT_FOUND != testPatternMode) {
9828            cam_test_pattern_data_t testPatternData;
9829            memset(&testPatternData, 0, sizeof(testPatternData));
9830            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9831            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9832                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9833                int32_t *fwk_testPatternData =
9834                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9835                testPatternData.r = fwk_testPatternData[0];
9836                testPatternData.b = fwk_testPatternData[3];
9837                switch (gCamCapability[mCameraId]->color_arrangement) {
9838                    case CAM_FILTER_ARRANGEMENT_RGGB:
9839                    case CAM_FILTER_ARRANGEMENT_GRBG:
9840                        testPatternData.gr = fwk_testPatternData[1];
9841                        testPatternData.gb = fwk_testPatternData[2];
9842                        break;
9843                    case CAM_FILTER_ARRANGEMENT_GBRG:
9844                    case CAM_FILTER_ARRANGEMENT_BGGR:
9845                        testPatternData.gr = fwk_testPatternData[2];
9846                        testPatternData.gb = fwk_testPatternData[1];
9847                        break;
9848                    default:
9849                        LOGE("color arrangement %d is not supported",
9850                                gCamCapability[mCameraId]->color_arrangement);
9851                        break;
9852                }
9853            }
9854            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9855                    testPatternData)) {
9856                rc = BAD_VALUE;
9857            }
9858        } else {
9859            LOGE("Invalid framework sensor test pattern mode %d",
9860                    fwk_testPatternMode);
9861        }
9862    }
9863
9864    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9865        size_t count = 0;
9866        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9867        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9868                gps_coords.data.d, gps_coords.count, count);
9869        if (gps_coords.count != count) {
9870            rc = BAD_VALUE;
9871        }
9872    }
9873
9874    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9875        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9876        size_t count = 0;
9877        const char *gps_methods_src = (const char *)
9878                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9879        memset(gps_methods, '\0', sizeof(gps_methods));
9880        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9881        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9882                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9883        if (GPS_PROCESSING_METHOD_SIZE != count) {
9884            rc = BAD_VALUE;
9885        }
9886    }
9887
9888    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9889        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9890        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9891                gps_timestamp)) {
9892            rc = BAD_VALUE;
9893        }
9894    }
9895
9896    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9897        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9898        cam_rotation_info_t rotation_info;
9899        if (orientation == 0) {
9900           rotation_info.rotation = ROTATE_0;
9901        } else if (orientation == 90) {
9902           rotation_info.rotation = ROTATE_90;
9903        } else if (orientation == 180) {
9904           rotation_info.rotation = ROTATE_180;
9905        } else if (orientation == 270) {
9906           rotation_info.rotation = ROTATE_270;
9907        }
9908        rotation_info.device_rotation = ROTATE_0;
9909        rotation_info.streamId = snapshotStreamId;
9910        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9911        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9912            rc = BAD_VALUE;
9913        }
9914    }
9915
9916    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9917        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9918        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9919            rc = BAD_VALUE;
9920        }
9921    }
9922
9923    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9924        uint32_t thumb_quality = (uint32_t)
9925                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9926        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9927                thumb_quality)) {
9928            rc = BAD_VALUE;
9929        }
9930    }
9931
9932    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9933        cam_dimension_t dim;
9934        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9935        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9936        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9937            rc = BAD_VALUE;
9938        }
9939    }
9940
9941    // Internal metadata
9942    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9943        size_t count = 0;
9944        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9945        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9946                privatedata.data.i32, privatedata.count, count);
9947        if (privatedata.count != count) {
9948            rc = BAD_VALUE;
9949        }
9950    }
9951
9952    // EV step
9953    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9954            gCamCapability[mCameraId]->exp_compensation_step)) {
9955        rc = BAD_VALUE;
9956    }
9957
9958    // CDS info
9959    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9960        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9961                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9962
9963        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9964                CAM_INTF_META_CDS_DATA, *cdsData)) {
9965            rc = BAD_VALUE;
9966        }
9967    }
9968
9969    // Hybrid AE
9970    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9971        uint8_t *hybrid_ae = (uint8_t *)
9972                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9973
9974        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9975                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9976            rc = BAD_VALUE;
9977        }
9978    }
9979
9980    return rc;
9981}
9982
9983/*===========================================================================
9984 * FUNCTION   : captureResultCb
9985 *
9986 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9987 *
9988 * PARAMETERS :
9989 *   @frame  : frame information from mm-camera-interface
9990 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9991 *   @userdata: userdata
9992 *
9993 * RETURN     : NONE
9994 *==========================================================================*/
9995void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9996                camera3_stream_buffer_t *buffer,
9997                uint32_t frame_number, bool isInputBuffer, void *userdata)
9998{
9999    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10000    if (hw == NULL) {
10001        LOGE("Invalid hw %p", hw);
10002        return;
10003    }
10004
10005    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10006    return;
10007}
10008
10009/*===========================================================================
10010 * FUNCTION   : setBufferErrorStatus
10011 *
10012 * DESCRIPTION: Callback handler for channels to report any buffer errors
10013 *
10014 * PARAMETERS :
10015 *   @ch     : Channel on which buffer error is reported from
10016 *   @frame_number  : frame number on which buffer error is reported on
10017 *   @buffer_status : buffer error status
10018 *   @userdata: userdata
10019 *
10020 * RETURN     : NONE
10021 *==========================================================================*/
10022void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10023                uint32_t frame_number, camera3_buffer_status_t err,
10024                void *userdata)
10025{
10026    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10027    if (hw == NULL) {
10028        LOGE("Invalid hw %p", hw);
10029        return;
10030    }
10031
10032    hw->setBufferErrorStatus(ch, frame_number, err);
10033    return;
10034}
10035
10036void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10037                uint32_t frameNumber, camera3_buffer_status_t err)
10038{
10039    LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
10040    pthread_mutex_lock(&mMutex);
10041
10042    for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
10043        if (req.frame_number != frameNumber)
10044            continue;
10045        for (auto& k : req.mPendingBufferList) {
10046            if(k.stream->priv == ch) {
10047                k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
10048            }
10049        }
10050    }
10051
10052    pthread_mutex_unlock(&mMutex);
10053    return;
10054}
10055/*===========================================================================
10056 * FUNCTION   : initialize
10057 *
10058 * DESCRIPTION: Pass framework callback pointers to HAL
10059 *
10060 * PARAMETERS :
10061 *
10062 *
10063 * RETURN     : Success : 0
10064 *              Failure: -ENODEV
10065 *==========================================================================*/
10066
10067int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10068                                  const camera3_callback_ops_t *callback_ops)
10069{
10070    LOGD("E");
10071    QCamera3HardwareInterface *hw =
10072        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10073    if (!hw) {
10074        LOGE("NULL camera device");
10075        return -ENODEV;
10076    }
10077
10078    int rc = hw->initialize(callback_ops);
10079    LOGD("X");
10080    return rc;
10081}
10082
10083/*===========================================================================
10084 * FUNCTION   : configure_streams
10085 *
10086 * DESCRIPTION:
10087 *
10088 * PARAMETERS :
10089 *
10090 *
10091 * RETURN     : Success: 0
10092 *              Failure: -EINVAL (if stream configuration is invalid)
10093 *                       -ENODEV (fatal error)
10094 *==========================================================================*/
10095
10096int QCamera3HardwareInterface::configure_streams(
10097        const struct camera3_device *device,
10098        camera3_stream_configuration_t *stream_list)
10099{
10100    LOGD("E");
10101    QCamera3HardwareInterface *hw =
10102        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10103    if (!hw) {
10104        LOGE("NULL camera device");
10105        return -ENODEV;
10106    }
10107    int rc = hw->configureStreams(stream_list);
10108    LOGD("X");
10109    return rc;
10110}
10111
10112/*===========================================================================
10113 * FUNCTION   : construct_default_request_settings
10114 *
10115 * DESCRIPTION: Configure a settings buffer to meet the required use case
10116 *
10117 * PARAMETERS :
10118 *
10119 *
10120 * RETURN     : Success: Return valid metadata
10121 *              Failure: Return NULL
10122 *==========================================================================*/
10123const camera_metadata_t* QCamera3HardwareInterface::
10124    construct_default_request_settings(const struct camera3_device *device,
10125                                        int type)
10126{
10127
10128    LOGD("E");
10129    camera_metadata_t* fwk_metadata = NULL;
10130    QCamera3HardwareInterface *hw =
10131        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10132    if (!hw) {
10133        LOGE("NULL camera device");
10134        return NULL;
10135    }
10136
10137    fwk_metadata = hw->translateCapabilityToMetadata(type);
10138
10139    LOGD("X");
10140    return fwk_metadata;
10141}
10142
10143/*===========================================================================
10144 * FUNCTION   : process_capture_request
10145 *
10146 * DESCRIPTION:
10147 *
10148 * PARAMETERS :
10149 *
10150 *
10151 * RETURN     :
10152 *==========================================================================*/
10153int QCamera3HardwareInterface::process_capture_request(
10154                    const struct camera3_device *device,
10155                    camera3_capture_request_t *request)
10156{
10157    LOGD("E");
10158    QCamera3HardwareInterface *hw =
10159        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10160    if (!hw) {
10161        LOGE("NULL camera device");
10162        return -EINVAL;
10163    }
10164
10165    int rc = hw->processCaptureRequest(request);
10166    LOGD("X");
10167    return rc;
10168}
10169
10170/*===========================================================================
10171 * FUNCTION   : dump
10172 *
10173 * DESCRIPTION:
10174 *
10175 * PARAMETERS :
10176 *
10177 *
10178 * RETURN     :
10179 *==========================================================================*/
10180
10181void QCamera3HardwareInterface::dump(
10182                const struct camera3_device *device, int fd)
10183{
10184    /* Log level property is read when "adb shell dumpsys media.camera" is
10185       called so that the log level can be controlled without restarting
10186       the media server */
10187    getLogLevel();
10188
10189    LOGD("E");
10190    QCamera3HardwareInterface *hw =
10191        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10192    if (!hw) {
10193        LOGE("NULL camera device");
10194        return;
10195    }
10196
10197    hw->dump(fd);
10198    LOGD("X");
10199    return;
10200}
10201
10202/*===========================================================================
10203 * FUNCTION   : flush
10204 *
10205 * DESCRIPTION:
10206 *
10207 * PARAMETERS :
10208 *
10209 *
10210 * RETURN     :
10211 *==========================================================================*/
10212
10213int QCamera3HardwareInterface::flush(
10214                const struct camera3_device *device)
10215{
10216    int rc;
10217    LOGD("E");
10218    QCamera3HardwareInterface *hw =
10219        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10220    if (!hw) {
10221        LOGE("NULL camera device");
10222        return -EINVAL;
10223    }
10224
10225    pthread_mutex_lock(&hw->mMutex);
10226    // Validate current state
10227    switch (hw->mState) {
10228        case STARTED:
10229            /* valid state */
10230            break;
10231
10232        case ERROR:
10233            pthread_mutex_unlock(&hw->mMutex);
10234            hw->handleCameraDeviceError();
10235            return -ENODEV;
10236
10237        default:
10238            LOGI("Flush returned during state %d", hw->mState);
10239            pthread_mutex_unlock(&hw->mMutex);
10240            return 0;
10241    }
10242    pthread_mutex_unlock(&hw->mMutex);
10243
10244    rc = hw->flush(true /* restart channels */ );
10245    LOGD("X");
10246    return rc;
10247}
10248
10249/*===========================================================================
10250 * FUNCTION   : close_camera_device
10251 *
10252 * DESCRIPTION:
10253 *
10254 * PARAMETERS :
10255 *
10256 *
10257 * RETURN     :
10258 *==========================================================================*/
10259int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10260{
10261    int ret = NO_ERROR;
10262    QCamera3HardwareInterface *hw =
10263        reinterpret_cast<QCamera3HardwareInterface *>(
10264            reinterpret_cast<camera3_device_t *>(device)->priv);
10265    if (!hw) {
10266        LOGE("NULL camera device");
10267        return BAD_VALUE;
10268    }
10269
10270    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10271    delete hw;
10272    LOGI("[KPI Perf]: X");
10273    return ret;
10274}
10275
10276/*===========================================================================
10277 * FUNCTION   : getWaveletDenoiseProcessPlate
10278 *
10279 * DESCRIPTION: query wavelet denoise process plate
10280 *
10281 * PARAMETERS : None
10282 *
 * RETURN     : WNR process plate value
10284 *==========================================================================*/
10285cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10286{
10287    char prop[PROPERTY_VALUE_MAX];
10288    memset(prop, 0, sizeof(prop));
10289    property_get("persist.denoise.process.plates", prop, "0");
10290    int processPlate = atoi(prop);
10291    switch(processPlate) {
10292    case 0:
10293        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10294    case 1:
10295        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10296    case 2:
10297        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10298    case 3:
10299        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10300    default:
10301        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10302    }
10303}
10304
10305
10306/*===========================================================================
10307 * FUNCTION   : getTemporalDenoiseProcessPlate
10308 *
10309 * DESCRIPTION: query temporal denoise process plate
10310 *
10311 * PARAMETERS : None
10312 *
 * RETURN     : TNR process plate value
10314 *==========================================================================*/
10315cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10316{
10317    char prop[PROPERTY_VALUE_MAX];
10318    memset(prop, 0, sizeof(prop));
10319    property_get("persist.tnr.process.plates", prop, "0");
10320    int processPlate = atoi(prop);
10321    switch(processPlate) {
10322    case 0:
10323        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10324    case 1:
10325        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10326    case 2:
10327        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10328    case 3:
10329        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10330    default:
10331        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10332    }
10333}
10334
10335
10336/*===========================================================================
10337 * FUNCTION   : extractSceneMode
10338 *
10339 * DESCRIPTION: Extract scene mode from frameworks set metadata
10340 *
10341 * PARAMETERS :
10342 *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
10344 *      @hal_metadata: hal metadata structure
10345 *
10346 * RETURN     : None
10347 *==========================================================================*/
10348int32_t QCamera3HardwareInterface::extractSceneMode(
10349        const CameraMetadata &frame_settings, uint8_t metaMode,
10350        metadata_buffer_t *hal_metadata)
10351{
10352    int32_t rc = NO_ERROR;
10353
10354    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10355        camera_metadata_ro_entry entry =
10356                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10357        if (0 == entry.count)
10358            return rc;
10359
10360        uint8_t fwk_sceneMode = entry.data.u8[0];
10361
10362        int val = lookupHalName(SCENE_MODES_MAP,
10363                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10364                fwk_sceneMode);
10365        if (NAME_NOT_FOUND != val) {
10366            uint8_t sceneMode = (uint8_t)val;
10367            LOGD("sceneMode: %d", sceneMode);
10368            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10369                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10370                rc = BAD_VALUE;
10371            }
10372        }
10373    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10374            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10375        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10376        LOGD("sceneMode: %d", sceneMode);
10377        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10378                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10379            rc = BAD_VALUE;
10380        }
10381    }
10382    return rc;
10383}
10384
10385/*===========================================================================
10386 * FUNCTION   : needRotationReprocess
10387 *
10388 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10389 *
10390 * PARAMETERS : none
10391 *
10392 * RETURN     : true: needed
10393 *              false: no need
10394 *==========================================================================*/
10395bool QCamera3HardwareInterface::needRotationReprocess()
10396{
10397    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10398        // current rotation is not zero, and pp has the capability to process rotation
10399        LOGH("need do reprocess for rotation");
10400        return true;
10401    }
10402
10403    return false;
10404}
10405
10406/*===========================================================================
10407 * FUNCTION   : needReprocess
10408 *
10409 * DESCRIPTION: if reprocess in needed
10410 *
10411 * PARAMETERS : none
10412 *
10413 * RETURN     : true: needed
10414 *              false: no need
10415 *==========================================================================*/
10416bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10417{
10418    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10419        // TODO: add for ZSL HDR later
10420        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10421        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10422            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10423            return true;
10424        } else {
10425            LOGH("already post processed frame");
10426            return false;
10427        }
10428    }
10429    return needRotationReprocess();
10430}
10431
10432/*===========================================================================
10433 * FUNCTION   : needJpegExifRotation
10434 *
10435 * DESCRIPTION: if rotation from jpeg is needed
10436 *
10437 * PARAMETERS : none
10438 *
10439 * RETURN     : true: needed
10440 *              false: no need
10441 *==========================================================================*/
10442bool QCamera3HardwareInterface::needJpegExifRotation()
10443{
10444   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10445    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10446       LOGD("Need use Jpeg EXIF Rotation");
10447       return true;
10448    }
10449    return false;
10450}
10451
10452/*===========================================================================
10453 * FUNCTION   : addOfflineReprocChannel
10454 *
10455 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10456 *              coming from input channel
10457 *
10458 * PARAMETERS :
10459 *   @config  : reprocess configuration
10460 *   @inputChHandle : pointer to the input (source) channel
10461 *
10462 *
10463 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10464 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel bound to the same camera/channel handles
    // and HAL callbacks as the rest of the pipeline. Starts with no qcom
    // feature bits; the pp feature mask is configured below.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    // Start from the HAL3 superset of pp features, then trim/extend based on
    // what the sensor's capability mask reports.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        // NOTE(review): DSDN replaces CDS here — presumably the superset mask
        // includes CDS; verify against CAM_QCOM_FEATURE_PP_SUPERSET_HAL3.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the pp mask when the hardware cannot rotate;
    // in that case JPEG EXIF rotation is used instead (see needJpegExifRotation).
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    // Caller owns the returned channel.
    return pChannel;
}
10512
10513/*===========================================================================
10514 * FUNCTION   : getMobicatMask
10515 *
10516 * DESCRIPTION: returns mobicat mask
10517 *
10518 * PARAMETERS : none
10519 *
10520 * RETURN     : mobicat mask
10521 *
10522 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Cached value written by the last setMobicat() call
    // (non-zero means mobicat is enabled).
    return m_MobicatMask;
}
10527
10528/*===========================================================================
10529 * FUNCTION   : setMobicat
10530 *
10531 * DESCRIPTION: set Mobicat on/off.
10532 *
10533 * PARAMETERS :
10534 *   @params  : none
10535 *
10536 * RETURN     : int32_t type of status
10537 *              NO_ERROR  -- success
 *              non-zero failure code
10539 *==========================================================================*/
10540int32_t QCamera3HardwareInterface::setMobicat()
10541{
10542    char value [PROPERTY_VALUE_MAX];
10543    property_get("persist.camera.mobicat", value, "0");
10544    int32_t ret = NO_ERROR;
10545    uint8_t enableMobi = (uint8_t)atoi(value);
10546
10547    if (enableMobi) {
10548        tune_cmd_t tune_cmd;
10549        tune_cmd.type = SET_RELOAD_CHROMATIX;
10550        tune_cmd.module = MODULE_ALL;
10551        tune_cmd.value = TRUE;
10552        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10553                CAM_INTF_PARM_SET_VFE_COMMAND,
10554                tune_cmd);
10555
10556        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10557                CAM_INTF_PARM_SET_PP_COMMAND,
10558                tune_cmd);
10559    }
10560    m_MobicatMask = enableMobi;
10561
10562    return ret;
10563}
10564
10565/*===========================================================================
10566* FUNCTION   : getLogLevel
10567*
10568* DESCRIPTION: Reads the log level property into a variable
10569*
10570* PARAMETERS :
10571*   None
10572*
10573* RETURN     :
10574*   None
10575*==========================================================================*/
10576void QCamera3HardwareInterface::getLogLevel()
10577{
10578    char prop[PROPERTY_VALUE_MAX];
10579    uint32_t globalLogLevel = 0;
10580
10581    property_get("persist.camera.hal.debug", prop, "0");
10582    int val = atoi(prop);
10583    if (0 <= val) {
10584        gCamHal3LogLevel = (uint32_t)val;
10585    }
10586
10587    property_get("persist.camera.kpi.debug", prop, "1");
10588    gKpiDebugLevel = atoi(prop);
10589
10590    property_get("persist.camera.global.debug", prop, "0");
10591    val = atoi(prop);
10592    if (0 <= val) {
10593        globalLogLevel = (uint32_t)val;
10594    }
10595
10596    /* Highest log level among hal.logs and global.logs is selected */
10597    if (gCamHal3LogLevel < globalLogLevel)
10598        gCamHal3LogLevel = globalLogLevel;
10599
10600    return;
10601}
10602
10603/*===========================================================================
10604 * FUNCTION   : validateStreamRotations
10605 *
10606 * DESCRIPTION: Check if the rotations requested are supported
10607 *
10608 * PARAMETERS :
10609 *   @stream_list : streams to be configured
10610 *
10611 * RETURN     : NO_ERROR on success
10612 *              -EINVAL on failure
10613 *
10614 *==========================================================================*/
10615int QCamera3HardwareInterface::validateStreamRotations(
10616        camera3_stream_configuration_t *streamList)
10617{
10618    int rc = NO_ERROR;
10619
10620    /*
10621    * Loop through all streams requested in configuration
10622    * Check if unsupported rotations have been requested on any of them
10623    */
10624    for (size_t j = 0; j < streamList->num_streams; j++){
10625        camera3_stream_t *newStream = streamList->streams[j];
10626
10627        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10628        bool isImplDef = (newStream->format ==
10629                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10630        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10631                isImplDef);
10632
10633        if (isRotated && (!isImplDef || isZsl)) {
10634            LOGE("Error: Unsupported rotation of %d requested for stream"
10635                    "type:%d and stream format:%d",
10636                    newStream->rotation, newStream->stream_type,
10637                    newStream->format);
10638            rc = -EINVAL;
10639            break;
10640        }
10641    }
10642
10643    return rc;
10644}
10645
10646/*===========================================================================
10647* FUNCTION   : getFlashInfo
10648*
10649* DESCRIPTION: Retrieve information about whether the device has a flash.
10650*
10651* PARAMETERS :
10652*   @cameraId  : Camera id to query
10653*   @hasFlash  : Boolean indicating whether there is a flash device
10654*                associated with given camera
10655*   @flashNode : If a flash device exists, this will be its device node.
10656*
10657* RETURN     :
10658*   None
10659*==========================================================================*/
10660void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10661        bool& hasFlash,
10662        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10663{
10664    cam_capability_t* camCapability = gCamCapability[cameraId];
10665    if (NULL == camCapability) {
10666        hasFlash = false;
10667        flashNode[0] = '\0';
10668    } else {
10669        hasFlash = camCapability->flash_available;
10670        strlcpy(flashNode,
10671                (char*)camCapability->flash_dev_name,
10672                QCAMERA_MAX_FILEPATH_LENGTH);
10673    }
10674}
10675
10676/*===========================================================================
10677* FUNCTION   : getEepromVersionInfo
10678*
10679* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10680*
10681* PARAMETERS : None
10682*
10683* RETURN     : string describing EEPROM version
10684*              "\0" if no such info available
10685*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Points into the capability table owned by the HAL; the caller must
    // not free it. An empty string means no EEPROM version is available.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
10690
10691/*===========================================================================
10692* FUNCTION   : getLdafCalib
10693*
10694* DESCRIPTION: Retrieve Laser AF calibration data
10695*
10696* PARAMETERS : None
10697*
10698* RETURN     : Two uint32_t describing laser AF calibration data
10699*              NULL if none is available.
10700*==========================================================================*/
10701const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10702{
10703    if (mLdafCalibExist) {
10704        return &mLdafCalib[0];
10705    } else {
10706        return NULL;
10707    }
10708}
10709
10710/*===========================================================================
10711 * FUNCTION   : dynamicUpdateMetaStreamInfo
10712 *
10713 * DESCRIPTION: This function:
10714 *             (1) stops all the channels
10715 *             (2) returns error on pending requests and buffers
10716 *             (3) sends metastream_info in setparams
10717 *             (4) starts all channels
10718 *             This is useful when sensor has to be restarted to apply any
10719 *             settings such as frame rate from a different sensor mode
10720 *
10721 * PARAMETERS : None
10722 *
10723 * RETURN     : NO_ERROR on success
10724 *              Error codes on failure
10725 *
10726 *==========================================================================*/
10727int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
10728{
10729    ATRACE_CALL();
10730    int rc = NO_ERROR;
10731
10732    LOGD("E");
10733
10734    rc = stopAllChannels();
10735    if (rc < 0) {
10736        LOGE("stopAllChannels failed");
10737        return rc;
10738    }
10739
10740    rc = notifyErrorForPendingRequests();
10741    if (rc < 0) {
10742        LOGE("notifyErrorForPendingRequests failed");
10743        return rc;
10744    }
10745
10746    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
10747        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
10748                "Format:%d",
10749                mStreamConfigInfo.type[i],
10750                mStreamConfigInfo.stream_sizes[i].width,
10751                mStreamConfigInfo.stream_sizes[i].height,
10752                mStreamConfigInfo.postprocess_mask[i],
10753                mStreamConfigInfo.format[i]);
10754    }
10755
10756    /* Send meta stream info once again so that ISP can start */
10757    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10758            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
10759    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
10760            mParameters);
10761    if (rc < 0) {
10762        LOGE("set Metastreaminfo failed. Sensor mode does not change");
10763    }
10764
10765    rc = startAllChannels();
10766    if (rc < 0) {
10767        LOGE("startAllChannels failed");
10768        return rc;
10769    }
10770
10771    LOGD("X");
10772    return rc;
10773}
10774
10775/*===========================================================================
10776 * FUNCTION   : stopAllChannels
10777 *
10778 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10779 *
10780 * PARAMETERS : None
10781 *
10782 * RETURN     : NO_ERROR on success
10783 *              Error codes on failure
10784 *
10785 *==========================================================================*/
10786int32_t QCamera3HardwareInterface::stopAllChannels()
10787{
10788    int32_t rc = NO_ERROR;
10789
10790    LOGD("Stopping all channels");
10791    // Stop the Streams/Channels
10792    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10793        it != mStreamInfo.end(); it++) {
10794        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10795        if (channel) {
10796            channel->stop();
10797        }
10798        (*it)->status = INVALID;
10799    }
10800
10801    if (mSupportChannel) {
10802        mSupportChannel->stop();
10803    }
10804    if (mAnalysisChannel) {
10805        mAnalysisChannel->stop();
10806    }
10807    if (mRawDumpChannel) {
10808        mRawDumpChannel->stop();
10809    }
10810    if (mMetadataChannel) {
10811        /* If content of mStreamInfo is not 0, there is metadata stream */
10812        mMetadataChannel->stop();
10813    }
10814
10815    LOGD("All channels stopped");
10816    return rc;
10817}
10818
10819/*===========================================================================
10820 * FUNCTION   : startAllChannels
10821 *
10822 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10823 *
10824 * PARAMETERS : None
10825 *
10826 * RETURN     : NO_ERROR on success
10827 *              Error codes on failure
10828 *
10829 *==========================================================================*/
10830int32_t QCamera3HardwareInterface::startAllChannels()
10831{
10832    int32_t rc = NO_ERROR;
10833
10834    LOGD("Start all channels ");
10835    // Start the Streams/Channels
10836    if (mMetadataChannel) {
10837        /* If content of mStreamInfo is not 0, there is metadata stream */
10838        rc = mMetadataChannel->start();
10839        if (rc < 0) {
10840            LOGE("META channel start failed");
10841            return rc;
10842        }
10843    }
10844    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10845        it != mStreamInfo.end(); it++) {
10846        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10847        if (channel) {
10848            rc = channel->start();
10849            if (rc < 0) {
10850                LOGE("channel start failed");
10851                return rc;
10852            }
10853        }
10854    }
10855    if (mAnalysisChannel) {
10856        mAnalysisChannel->start();
10857    }
10858    if (mSupportChannel) {
10859        rc = mSupportChannel->start();
10860        if (rc < 0) {
10861            LOGE("Support channel start failed");
10862            return rc;
10863        }
10864    }
10865    if (mRawDumpChannel) {
10866        rc = mRawDumpChannel->start();
10867        if (rc < 0) {
10868            LOGE("RAW dump channel start failed");
10869            return rc;
10870        }
10871    }
10872
10873    LOGD("All channels started");
10874    return rc;
10875}
10876
10877/*===========================================================================
10878 * FUNCTION   : notifyErrorForPendingRequests
10879 *
10880 * DESCRIPTION: This function sends error for all the pending requests/buffers
10881 *
10882 * PARAMETERS : None
10883 *
10884 * RETURN     : Error codes
10885 *              NO_ERROR on success
10886 *
10887 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest pending request's frame number. Buffers older than
    // it get per-buffer errors (their metadata was already delivered);
    // buffers at/after it get a full request error.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // Return every buffer of this request with ERROR status plus a
            // per-buffer CAMERA3_MSG_ERROR_BUFFER notification, erasing each
            // list entry as we go.
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            // (one CAMERA3_MSG_ERROR_REQUEST covers the whole request)
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // NOTE(review): this branch is only reached when
            // mPendingRequestsList is non-empty (otherwise frameNum is
            // UINT_MAX and the branch above always runs), so i is valid.
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            // Also drop the matching entry from the pending request list.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
11026
11027bool QCamera3HardwareInterface::isOnEncoder(
11028        const cam_dimension_t max_viewfinder_size,
11029        uint32_t width, uint32_t height)
11030{
11031    return (width > (uint32_t)max_viewfinder_size.width ||
11032            height > (uint32_t)max_viewfinder_size.height);
11033}
11034
11035/*===========================================================================
11036 * FUNCTION   : setBundleInfo
11037 *
11038 * DESCRIPTION: Set bundle info for all streams that are bundle.
11039 *
11040 * PARAMETERS : None
11041 *
11042 * RETURN     : NO_ERROR on success
11043 *              Error codes on failure
11044 *==========================================================================*/
11045int32_t QCamera3HardwareInterface::setBundleInfo()
11046{
11047    int32_t rc = NO_ERROR;
11048
11049    if (mChannelHandle) {
11050        cam_bundle_config_t bundleInfo;
11051        memset(&bundleInfo, 0, sizeof(bundleInfo));
11052        rc = mCameraHandle->ops->get_bundle_info(
11053                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
11054        if (rc != NO_ERROR) {
11055            LOGE("get_bundle_info failed");
11056            return rc;
11057        }
11058        if (mAnalysisChannel) {
11059            mAnalysisChannel->setBundleInfo(bundleInfo);
11060        }
11061        if (mSupportChannel) {
11062            mSupportChannel->setBundleInfo(bundleInfo);
11063        }
11064        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11065                it != mStreamInfo.end(); it++) {
11066            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11067            channel->setBundleInfo(bundleInfo);
11068        }
11069        if (mRawDumpChannel) {
11070            mRawDumpChannel->setBundleInfo(bundleInfo);
11071        }
11072    }
11073
11074    return rc;
11075}
11076
11077/*===========================================================================
11078 * FUNCTION   : get_num_overall_buffers
11079 *
11080 * DESCRIPTION: Estimate number of pending buffers across all requests.
11081 *
11082 * PARAMETERS : None
11083 *
11084 * RETURN     : Number of overall pending buffers
11085 *
11086 *==========================================================================*/
11087uint32_t PendingBuffersMap::get_num_overall_buffers()
11088{
11089    uint32_t sum_buffers = 0;
11090    for (auto &req : mPendingBuffersInRequest) {
11091        sum_buffers += req.mPendingBufferList.size();
11092    }
11093    return sum_buffers;
11094}
11095
11096/*===========================================================================
11097 * FUNCTION   : removeBuf
11098 *
11099 * DESCRIPTION: Remove a matching buffer from tracker.
11100 *
11101 * PARAMETERS : @buffer: image buffer for the callback
11102 *
11103 * RETURN     : None
11104 *
11105 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Scan every pending request for this handle; at most one entry is
    // removed (the scan stops at the first match).
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            // Found and removed; no need to scan remaining requests.
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
11132
11133/*===========================================================================
11134 * FUNCTION   : getBufErrStatus
11135 *
11136 * DESCRIPTION: get buffer error status
11137 *
11138 * PARAMETERS : @buffer: buffer handle
11139 *
11140 * RETURN     : None
11141 *
11142 *==========================================================================*/
11143int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
11144{
11145    for (auto& req : mPendingBuffersInRequest) {
11146        for (auto& k : req.mPendingBufferList) {
11147            if (k.buffer == buffer)
11148                return k.bufStatus;
11149        }
11150    }
11151    return CAMERA3_BUFFER_STATUS_OK;
11152}
11153
11154/*===========================================================================
11155 * FUNCTION   : setPAAFSupport
11156 *
11157 * DESCRIPTION: Set the preview-assisted auto focus support bit in
11158 *              feature mask according to stream type and filter
11159 *              arrangement
11160 *
11161 * PARAMETERS : @feature_mask: current feature mask, which may be modified
11162 *              @stream_type: stream type
11163 *              @filter_arrangement: filter arrangement
11164 *
11165 * RETURN     : None
11166 *==========================================================================*/
11167void QCamera3HardwareInterface::setPAAFSupport(
11168        cam_feature_mask_t& feature_mask,
11169        cam_stream_type_t stream_type,
11170        cam_color_filter_arrangement_t filter_arrangement)
11171{
11172    switch (filter_arrangement) {
11173    case CAM_FILTER_ARRANGEMENT_RGGB:
11174    case CAM_FILTER_ARRANGEMENT_GRBG:
11175    case CAM_FILTER_ARRANGEMENT_GBRG:
11176    case CAM_FILTER_ARRANGEMENT_BGGR:
11177        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11178                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11179            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11180        }
11181        break;
11182    case CAM_FILTER_ARRANGEMENT_Y:
11183        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11184            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11185        }
11186        break;
11187    default:
11188        break;
11189    }
11190    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11191            feature_mask, stream_type, filter_arrangement);
11192
11193
11194}
11195
11196/*===========================================================================
11197 * FUNCTION   : adjustBlackLevelForCFA
11198 *
11199 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11200 *              of bayer CFA (Color Filter Array).
11201 *
11202 * PARAMETERS : @input: black level pattern in the order of RGGB
11203 *              @output: black level pattern in the order of CFA
11204 *              @color_arrangement: CFA color arrangement
11205 *
11206 * RETURN     : None
11207 *==========================================================================*/
11208template<typename T>
11209void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11210        T input[BLACK_LEVEL_PATTERN_CNT],
11211        T output[BLACK_LEVEL_PATTERN_CNT],
11212        cam_color_filter_arrangement_t color_arrangement)
11213{
11214    switch (color_arrangement) {
11215    case CAM_FILTER_ARRANGEMENT_GRBG:
11216        output[0] = input[1];
11217        output[1] = input[0];
11218        output[2] = input[3];
11219        output[3] = input[2];
11220        break;
11221    case CAM_FILTER_ARRANGEMENT_GBRG:
11222        output[0] = input[2];
11223        output[1] = input[3];
11224        output[2] = input[0];
11225        output[3] = input[1];
11226        break;
11227    case CAM_FILTER_ARRANGEMENT_BGGR:
11228        output[0] = input[3];
11229        output[1] = input[2];
11230        output[2] = input[1];
11231        output[3] = input[0];
11232        break;
11233    case CAM_FILTER_ARRANGEMENT_RGGB:
11234        output[0] = input[0];
11235        output[1] = input[1];
11236        output[2] = input[2];
11237        output[3] = input[3];
11238        break;
11239    default:
11240        LOGE("Invalid color arrangement to derive dynamic blacklevel");
11241        break;
11242    }
11243}
11244
11245/*===========================================================================
11246 * FUNCTION   : is60HzZone
11247 *
11248 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
11249 *
11250 * PARAMETERS : None
11251 *
11252 * RETURN     : True if in 60Hz zone, False otherwise
11253 *==========================================================================*/
11254bool QCamera3HardwareInterface::is60HzZone()
11255{
11256    time_t t = time(NULL);
11257    struct tm lt;
11258
11259    struct tm* r = localtime_r(&t, &lt);
11260
11261    if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
11262        return true;
11263    else
11264        return false;
11265}
11266}; //end namespace qcamera
11267