QCamera3HWI.cpp revision 2bdb93f14774fa8fd70334150e43e12e1423471f
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
// Convenience accessor for a memory object's mapped pointer at a given index.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline depths/delays reported to the framework (frames).
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum representable values for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS is considered supported.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream count limits advertised to the framework.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 values per metering region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
#define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically-sized array (used for the tables below).
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature mask applied to HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel for waits that should block indefinitely.
#define TIMEOUT_NEVER -1

// Per-sensor capability tables and cached static metadata, indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity (read by getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
113
// Property-string to HAL CDS (chroma down-sampling) mode lookup table.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
119
// Android control.effectMode <-> HAL effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
133
// Android control.awbMode <-> HAL white-balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
147
// Android control.sceneMode <-> HAL scene mode translation table.
// Note: STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
168
// Android control.afMode <-> HAL focus mode translation table.
// AF_MODE_OFF appears twice (OFF and FIXED on the HAL side); when mapping
// HAL -> Android the first matching entry wins, so order matters here.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
180
// Android colorCorrection.aberrationMode <-> HAL CAC mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
191
// Android control.aeAntibandingMode <-> HAL antibanding mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
200
// Android control.aeMode -> HAL flash mode translation table.
// Both OFF and ON (no-flash AE) map to flash OFF; both AUTO_FLASH variants
// map to flash AUTO.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
210
// Android flash.mode <-> HAL flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
218
// Android statistics.faceDetectMode <-> HAL face-detect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
226
// Android lens.info.focusDistanceCalibration <-> HAL focus calibration table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
237
// Android lens.state <-> HAL AF lens state translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
244
// Supported JPEG thumbnail sizes as a flat list of (width, height) pairs.
// NOTE(review): the leading (0, 0) entry presumably denotes "no thumbnail"
// per the Android jpeg.availableThumbnailSizes convention -- confirm at usage.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
253
// Default video sizes offered for high-frame-rate recording, largest first.
const cam_dimension_t default_hfr_video_sizes[] = {
    { 3840, 2160 },
    { 1920, 1080 },
    { 1280,  720 },
    {  640,  480 },
    {  480,  320 }
};
261
262
// Android sensor.testPatternMode <-> HAL test pattern translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
273
274/* Since there is no mapping for all the options some Android enum are not listed.
275 * Also, the order in this list is important because while mapping from HAL to Android it will
276 * traverse from lower to higher index which means that for HAL values that are map to different
277 * Android values, the traverse logic will select the first one found.
278 */
// Android sensor.referenceIlluminant1 <-> HAL AWB illuminant translation table.
// Several HAL values (e.g. CAM_AWB_D50, CAM_AWB_A, CAM_AWB_D65) appear more
// than once; per the note above, the first match wins when mapping HAL->Android.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
299
// Requested frame rate (fps) -> HAL high-frame-rate mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
311
// camera3_device_ops vtable handed to the camera framework. Unsupported
// entry points (register_stream_buffers, vendor tag ops) are left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
323
324/*===========================================================================
325 * FUNCTION   : QCamera3HardwareInterface
326 *
327 * DESCRIPTION: constructor of QCamera3HardwareInterface
328 *
329 * PARAMETERS :
330 *   @cameraId  : camera ID
331 *
332 * RETURN     : none
333 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Fill in the camera3_device_t handed back to the framework by openCamera().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Synchronization primitives shared with the request/result paths.
    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Start with no cached default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) enable flags for preview and video,
    // controlled by persist properties; default off.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU pixel alignment from the Adreno utils library when it is
    // available; otherwise fall back to 32-pixel stride padding. The library
    // is closed again immediately -- only the queried value is kept.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
434
435/*===========================================================================
436 * FUNCTION   : ~QCamera3HardwareInterface
437 *
438 * DESCRIPTION: destructor of QCamera3HardwareInterface
439 *
440 * PARAMETERS : none
441 *
442 * RETURN     : none
443 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    // Phase 1: stop every channel (processing, support, analysis, metadata)
    // and the aggregate channel handle before any deletion below.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Phase 2: delete the channel objects and free the stream_info records.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo (deleted above); just clear it.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // Push an empty stream configuration to the backend so it can
            // release per-stream resources before parameters are torn down.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    // Close the camera only if it was not already closed explicitly.
    if (mState != CLOSED)
        closeCamera();

    // Drop all pending bookkeeping: buffers, reprocess results, requests,
    // and any cached default request templates.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
566
567/*===========================================================================
568 * FUNCTION   : erasePendingRequest
569 *
570 * DESCRIPTION: function to erase a desired pending request after freeing any
571 *              allocated memory
572 *
573 * PARAMETERS :
574 *   @i       : iterator pointing to pending request to be erased
575 *
576 * RETURN     : iterator pointing to the next request
577 *==========================================================================*/
578QCamera3HardwareInterface::pendingRequestIterator
579        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
580{
581    if (i->input_buffer != NULL) {
582        free(i->input_buffer);
583        i->input_buffer = NULL;
584    }
585    if (i->settings != NULL)
586        free_camera_metadata((camera_metadata_t*)i->settings);
587    return mPendingRequestsList.erase(i);
588}
589
590/*===========================================================================
591 * FUNCTION   : camEvtHandle
592 *
593 * DESCRIPTION: Function registered to mm-camera-interface to handle events
594 *
595 * PARAMETERS :
596 *   @camera_handle : interface layer camera handle
597 *   @evt           : ptr to event
598 *   @user_data     : user data ptr
599 *
600 * RETURN     : none
601 *==========================================================================*/
602void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
603                                          mm_camera_event_t *evt,
604                                          void *user_data)
605{
606    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
607    if (obj && evt) {
608        switch(evt->server_event_type) {
609            case CAM_EVENT_TYPE_DAEMON_DIED:
610                pthread_mutex_lock(&obj->mMutex);
611                obj->mState = ERROR;
612                pthread_mutex_unlock(&obj->mMutex);
613                LOGE("Fatal, camera daemon died");
614                break;
615
616            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
617                LOGD("HAL got request pull from Daemon");
618                pthread_mutex_lock(&obj->mMutex);
619                obj->mWokenUpByDaemon = true;
620                obj->unblockRequestIfNecessary();
621                pthread_mutex_unlock(&obj->mMutex);
622                break;
623
624            default:
625                LOGW("Warning: Unhandled event %d",
626                        evt->server_event_type);
627                break;
628        }
629    } else {
630        LOGE("NULL user_data/evt");
631    }
632}
633
634/*===========================================================================
635 * FUNCTION   : openCamera
636 *
637 * DESCRIPTION: open camera
638 *
639 * PARAMETERS :
640 *   @hw_device  : double ptr for camera device struct
641 *
642 * RETURN     : int32_t type of status
643 *              NO_ERROR  -- success
644 *              none-zero failure code
645 *==========================================================================*/
646int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
647{
648    int rc = 0;
649    if (mState != CLOSED) {
650        *hw_device = NULL;
651        return PERMISSION_DENIED;
652    }
653
654    m_perfLock.lock_acq();
655    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
656             mCameraId);
657
658    rc = openCamera();
659    if (rc == 0) {
660        *hw_device = &mCameraDevice.common;
661    } else
662        *hw_device = NULL;
663
664    m_perfLock.lock_rel();
665    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
666             mCameraId, rc);
667
668    if (rc == NO_ERROR) {
669        mState = OPENED;
670    }
671    return rc;
672}
673
674/*===========================================================================
675 * FUNCTION   : openCamera
676 *
677 * DESCRIPTION: open camera
678 *
679 * PARAMETERS : none
680 *
681 * RETURN     : int32_t type of status
682 *              NO_ERROR  -- success
683 *              none-zero failure code
684 *==========================================================================*/
685int QCamera3HardwareInterface::openCamera()
686{
687    int rc = 0;
688    char value[PROPERTY_VALUE_MAX];
689
690    KPI_ATRACE_CALL();
691    if (mCameraHandle) {
692        LOGE("Failure: Camera already opened");
693        return ALREADY_EXISTS;
694    }
695
696    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
697    if (rc < 0) {
698        LOGE("Failed to reserve flash for camera id: %d",
699                mCameraId);
700        return UNKNOWN_ERROR;
701    }
702
703    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
704    if (rc) {
705        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
706        return rc;
707    }
708
709    if (!mCameraHandle) {
710        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
711        return -ENODEV;
712    }
713
714    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
715            camEvtHandle, (void *)this);
716
717    if (rc < 0) {
718        LOGE("Error, failed to register event callback");
719        /* Not closing camera here since it is already handled in destructor */
720        return FAILED_TRANSACTION;
721    }
722
723    mExifParams.debug_params =
724            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
725    if (mExifParams.debug_params) {
726        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
727    } else {
728        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
729        return NO_MEMORY;
730    }
731    mFirstConfiguration = true;
732
733    //Notify display HAL that a camera session is active.
734    //But avoid calling the same during bootup because camera service might open/close
735    //cameras at boot time during its initialization and display service will also internally
736    //wait for camera service to initialize first while calling this display API, resulting in a
737    //deadlock situation. Since boot time camera open/close calls are made only to fetch
738    //capabilities, no need of this display bw optimization.
739    //Use "service.bootanim.exit" property to know boot status.
740    property_get("service.bootanim.exit", value, "0");
741    if (atoi(value) == 1) {
742        pthread_mutex_lock(&gCamLock);
743        if (gNumCameraSessions++ == 0) {
744            setCameraLaunchStatus(true);
745        }
746        pthread_mutex_unlock(&gCamLock);
747    }
748
749    return NO_ERROR;
750}
751
752/*===========================================================================
753 * FUNCTION   : closeCamera
754 *
755 * DESCRIPTION: close camera
756 *
757 * PARAMETERS : none
758 *
759 * RETURN     : int32_t type of status
760 *              NO_ERROR  -- success
761 *              none-zero failure code
762 *==========================================================================*/
763int QCamera3HardwareInterface::closeCamera()
764{
765    KPI_ATRACE_CALL();
766    int rc = NO_ERROR;
767    char value[PROPERTY_VALUE_MAX];
768
769    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
770             mCameraId);
771    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
772    mCameraHandle = NULL;
773
774    //Notify display HAL that there is no active camera session
775    //but avoid calling the same during bootup. Refer to openCamera
776    //for more details.
777    property_get("service.bootanim.exit", value, "0");
778    if (atoi(value) == 1) {
779        pthread_mutex_lock(&gCamLock);
780        if (--gNumCameraSessions == 0) {
781            setCameraLaunchStatus(false);
782        }
783        pthread_mutex_unlock(&gCamLock);
784    }
785
786    if (mExifParams.debug_params) {
787        free(mExifParams.debug_params);
788        mExifParams.debug_params = NULL;
789    }
790    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
791        LOGW("Failed to release flash for camera id: %d",
792                mCameraId);
793    }
794    mState = CLOSED;
795    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
796         mCameraId, rc);
797    return rc;
798}
799
800/*===========================================================================
801 * FUNCTION   : initialize
802 *
803 * DESCRIPTION: Initialize frameworks callback functions
804 *
805 * PARAMETERS :
806 *   @callback_ops : callback function to frameworks
807 *
808 * RETURN     :
809 *
810 *==========================================================================*/
811int QCamera3HardwareInterface::initialize(
812        const struct camera3_callback_ops *callback_ops)
813{
814    ATRACE_CALL();
815    int rc;
816
817    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
818    pthread_mutex_lock(&mMutex);
819
820    // Validate current state
821    switch (mState) {
822        case OPENED:
823            /* valid state */
824            break;
825
826        case ERROR:
827            pthread_mutex_unlock(&mMutex);
828            handleCameraDeviceError();
829            rc = -ENODEV;
830            goto err2;
831
832        default:
833            LOGE("Invalid state %d", mState);
834            rc = -ENODEV;
835            goto err1;
836    }
837
838    rc = initParameters();
839    if (rc < 0) {
840        LOGE("initParamters failed %d", rc);
841        goto err1;
842    }
843    mCallbackOps = callback_ops;
844
845    mChannelHandle = mCameraHandle->ops->add_channel(
846            mCameraHandle->camera_handle, NULL, NULL, this);
847    if (mChannelHandle == 0) {
848        LOGE("add_channel failed");
849        rc = -ENOMEM;
850        pthread_mutex_unlock(&mMutex);
851        return rc;
852    }
853
854    pthread_mutex_unlock(&mMutex);
855    mCameraInitialized = true;
856    mState = INITIALIZED;
857    LOGI("X");
858    return 0;
859
860err1:
861    pthread_mutex_unlock(&mMutex);
862err2:
863    return rc;
864}
865
866/*===========================================================================
867 * FUNCTION   : validateStreamDimensions
868 *
869 * DESCRIPTION: Check if the configuration requested are those advertised
870 *
871 * PARAMETERS :
872 *   @stream_list : streams to be configured
873 *
874 * RETURN     :
875 *
876 *==========================================================================*/
877int QCamera3HardwareInterface::validateStreamDimensions(
878        camera3_stream_configuration_t *streamList)
879{
880    int rc = NO_ERROR;
881    size_t count = 0;
882
883    camera3_stream_t *inputStream = NULL;
884    /*
885    * Loop through all streams to find input stream if it exists*
886    */
887    for (size_t i = 0; i< streamList->num_streams; i++) {
888        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
889            if (inputStream != NULL) {
890                LOGE("Error, Multiple input streams requested");
891                return -EINVAL;
892            }
893            inputStream = streamList->streams[i];
894        }
895    }
896    /*
897    * Loop through all streams requested in configuration
898    * Check if unsupported sizes have been requested on any of them
899    */
900    for (size_t j = 0; j < streamList->num_streams; j++) {
901        bool sizeFound = false;
902        camera3_stream_t *newStream = streamList->streams[j];
903
904        uint32_t rotatedHeight = newStream->height;
905        uint32_t rotatedWidth = newStream->width;
906        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
907                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
908            rotatedHeight = newStream->width;
909            rotatedWidth = newStream->height;
910        }
911
912        /*
913        * Sizes are different for each type of stream format check against
914        * appropriate table.
915        */
916        switch (newStream->format) {
917        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
918        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
919        case HAL_PIXEL_FORMAT_RAW10:
920            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
921            for (size_t i = 0; i < count; i++) {
922                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
923                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
924                    sizeFound = true;
925                    break;
926                }
927            }
928            break;
929        case HAL_PIXEL_FORMAT_BLOB:
930            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
931            /* Verify set size against generated sizes table */
932            for (size_t i = 0; i < count; i++) {
933                if (((int32_t)rotatedWidth ==
934                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
935                        ((int32_t)rotatedHeight ==
936                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
937                    sizeFound = true;
938                    break;
939                }
940            }
941            break;
942        case HAL_PIXEL_FORMAT_YCbCr_420_888:
943        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
944        default:
945            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
946                    || newStream->stream_type == CAMERA3_STREAM_INPUT
947                    || IS_USAGE_ZSL(newStream->usage)) {
948                if (((int32_t)rotatedWidth ==
949                                gCamCapability[mCameraId]->active_array_size.width) &&
950                                ((int32_t)rotatedHeight ==
951                                gCamCapability[mCameraId]->active_array_size.height)) {
952                    sizeFound = true;
953                    break;
954                }
955                /* We could potentially break here to enforce ZSL stream
956                 * set from frameworks always is full active array size
957                 * but it is not clear from the spc if framework will always
958                 * follow that, also we have logic to override to full array
959                 * size, so keeping the logic lenient at the moment
960                 */
961            }
962            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
963                    MAX_SIZES_CNT);
964            for (size_t i = 0; i < count; i++) {
965                if (((int32_t)rotatedWidth ==
966                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
967                            ((int32_t)rotatedHeight ==
968                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
969                    sizeFound = true;
970                    break;
971                }
972            }
973            break;
974        } /* End of switch(newStream->format) */
975
976        /* We error out even if a single stream has unsupported size set */
977        if (!sizeFound) {
978            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
979                    rotatedWidth, rotatedHeight, newStream->format,
980                    gCamCapability[mCameraId]->active_array_size.width,
981                    gCamCapability[mCameraId]->active_array_size.height);
982            rc = -EINVAL;
983            break;
984        }
985    } /* End of for each stream */
986    return rc;
987}
988
989/*==============================================================================
990 * FUNCTION   : isSupportChannelNeeded
991 *
 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
993 *
994 * PARAMETERS :
995 *   @stream_list : streams to be configured
996 *   @stream_config_info : the config info for streams to be configured
997 *
 * RETURN     : Boolean true/false decision
999 *
1000 *==========================================================================*/
1001bool QCamera3HardwareInterface::isSupportChannelNeeded(
1002        camera3_stream_configuration_t *streamList,
1003        cam_stream_size_info_t stream_config_info)
1004{
1005    uint32_t i;
1006    bool pprocRequested = false;
1007    /* Check for conditions where PProc pipeline does not have any streams*/
1008    for (i = 0; i < stream_config_info.num_streams; i++) {
1009        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1010                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1011            pprocRequested = true;
1012            break;
1013        }
1014    }
1015
1016    if (pprocRequested == false )
1017        return true;
1018
1019    /* Dummy stream needed if only raw or jpeg streams present */
1020    for (i = 0; i < streamList->num_streams; i++) {
1021        switch(streamList->streams[i]->format) {
1022            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1023            case HAL_PIXEL_FORMAT_RAW10:
1024            case HAL_PIXEL_FORMAT_RAW16:
1025            case HAL_PIXEL_FORMAT_BLOB:
1026                break;
1027            default:
1028                return false;
1029        }
1030    }
1031    return true;
1032}
1033
1034/*==============================================================================
1035 * FUNCTION   : getSensorOutputSize
1036 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1038 *
1039 * PARAMETERS :
1040 *   @sensor_dim : sensor output dimension (output)
1041 *
1042 * RETURN     : int32_t type of status
1043 *              NO_ERROR  -- success
 *              non-zero failure code
1045 *
1046 *==========================================================================*/
1047int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1048{
1049    int32_t rc = NO_ERROR;
1050
1051    cam_dimension_t max_dim = {0, 0};
1052    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1053        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1054            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1055        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1056            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1057    }
1058
1059    clear_metadata_buffer(mParameters);
1060
1061    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1062            max_dim);
1063    if (rc != NO_ERROR) {
1064        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1065        return rc;
1066    }
1067
1068    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1069    if (rc != NO_ERROR) {
1070        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1071        return rc;
1072    }
1073
1074    clear_metadata_buffer(mParameters);
1075    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1076
1077    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1078            mParameters);
1079    if (rc != NO_ERROR) {
1080        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1081        return rc;
1082    }
1083
1084    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1085    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1086
1087    return rc;
1088}
1089
1090/*==============================================================================
1091 * FUNCTION   : enablePowerHint
1092 *
1093 * DESCRIPTION: enable single powerhint for preview and different video modes.
1094 *
1095 * PARAMETERS :
1096 *
1097 * RETURN     : NULL
1098 *
1099 *==========================================================================*/
1100void QCamera3HardwareInterface::enablePowerHint()
1101{
1102    if (!mPowerHintEnabled) {
1103        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1104        mPowerHintEnabled = true;
1105    }
1106}
1107
1108/*==============================================================================
1109 * FUNCTION   : disablePowerHint
1110 *
1111 * DESCRIPTION: disable current powerhint.
1112 *
1113 * PARAMETERS :
1114 *
1115 * RETURN     : NULL
1116 *
1117 *==========================================================================*/
1118void QCamera3HardwareInterface::disablePowerHint()
1119{
1120    if (mPowerHintEnabled) {
1121        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1122        mPowerHintEnabled = false;
1123    }
1124}
1125
1126/*==============================================================================
1127 * FUNCTION   : addToPPFeatureMask
1128 *
1129 * DESCRIPTION: add additional features to pp feature mask based on
1130 *              stream type and usecase
1131 *
1132 * PARAMETERS :
1133 *   @stream_format : stream type for feature mask
1134 *   @stream_idx : stream idx within postprocess_mask list to change
1135 *
1136 * RETURN     : NULL
1137 *
1138 *==========================================================================*/
1139void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1140        uint32_t stream_idx)
1141{
1142    char feature_mask_value[PROPERTY_VALUE_MAX];
1143    uint32_t feature_mask;
1144    int args_converted;
1145    int property_len;
1146
1147    /* Get feature mask from property */
1148    property_len = property_get("persist.camera.hal3.feature",
1149            feature_mask_value, "0");
1150    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1151            (feature_mask_value[1] == 'x')) {
1152        args_converted = sscanf(feature_mask_value, "0x%x", &feature_mask);
1153    } else {
1154        args_converted = sscanf(feature_mask_value, "%d", &feature_mask);
1155    }
1156    if (1 != args_converted) {
1157        feature_mask = 0;
1158        LOGE("Wrong feature mask %s", feature_mask_value);
1159        return;
1160    }
1161
1162    switch (stream_format) {
1163    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1164        /* Add LLVD to pp feature mask only if video hint is enabled */
1165        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1166            mStreamConfigInfo.postprocess_mask[stream_idx]
1167                    |= CAM_QTI_FEATURE_SW_TNR;
1168            LOGH("Added SW TNR to pp feature mask");
1169        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1170            mStreamConfigInfo.postprocess_mask[stream_idx]
1171                    |= CAM_QCOM_FEATURE_LLVD;
1172            LOGH("Added LLVD SeeMore to pp feature mask");
1173        }
1174        break;
1175    }
1176    default:
1177        break;
1178    }
1179    LOGD("PP feature mask %x",
1180            mStreamConfigInfo.postprocess_mask[stream_idx]);
1181}
1182
1183/*==============================================================================
1184 * FUNCTION   : updateFpsInPreviewBuffer
1185 *
1186 * DESCRIPTION: update FPS information in preview buffer.
1187 *
1188 * PARAMETERS :
1189 *   @metadata    : pointer to metadata buffer
1190 *   @frame_number: frame_number to look for in pending buffer list
1191 *
1192 * RETURN     : None
1193 *
1194 *==========================================================================*/
1195void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1196        uint32_t frame_number)
1197{
1198    // Mark all pending buffers for this particular request
1199    // with corresponding framerate information
1200    for (List<PendingBuffersInRequest>::iterator req =
1201            mPendingBuffersMap.mPendingBuffersInRequest.begin();
1202            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1203        for(List<PendingBufferInfo>::iterator j =
1204                req->mPendingBufferList.begin();
1205                j != req->mPendingBufferList.end(); j++) {
1206            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1207            if ((req->frame_number == frame_number) &&
1208                (channel->getStreamTypeMask() &
1209                (1U << CAM_STREAM_TYPE_PREVIEW))) {
1210                IF_META_AVAILABLE(cam_fps_range_t, float_range,
1211                    CAM_INTF_PARM_FPS_RANGE, metadata) {
1212                    int32_t cameraFps = float_range->max_fps;
1213                    struct private_handle_t *priv_handle =
1214                        (struct private_handle_t *)(*(j->buffer));
1215                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1216                }
1217            }
1218        }
1219    }
1220}
1221
1222/*===========================================================================
1223 * FUNCTION   : configureStreams
1224 *
1225 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1226 *              and output streams.
1227 *
1228 * PARAMETERS :
1229 *   @stream_list : streams to be configured
1230 *
1231 * RETURN     :
1232 *
1233 *==========================================================================*/
1234int QCamera3HardwareInterface::configureStreams(
1235        camera3_stream_configuration_t *streamList)
1236{
1237    ATRACE_CALL();
1238    int rc = 0;
1239
1240    // Acquire perfLock before configure streams
1241    m_perfLock.lock_acq();
1242    rc = configureStreamsPerfLocked(streamList);
1243    m_perfLock.lock_rel();
1244
1245    return rc;
1246}
1247
1248/*===========================================================================
1249 * FUNCTION   : configureStreamsPerfLocked
1250 *
1251 * DESCRIPTION: configureStreams while perfLock is held.
1252 *
1253 * PARAMETERS :
1254 *   @stream_list : streams to be configured
1255 *
1256 * RETURN     : int32_t type of status
1257 *              NO_ERROR  -- success
 *              non-zero failure code
1259 *==========================================================================*/
1260int QCamera3HardwareInterface::configureStreamsPerfLocked(
1261        camera3_stream_configuration_t *streamList)
1262{
1263    ATRACE_CALL();
1264    int rc = 0;
1265
1266    // Sanity check stream_list
1267    if (streamList == NULL) {
1268        LOGE("NULL stream configuration");
1269        return BAD_VALUE;
1270    }
1271    if (streamList->streams == NULL) {
1272        LOGE("NULL stream list");
1273        return BAD_VALUE;
1274    }
1275
1276    if (streamList->num_streams < 1) {
1277        LOGE("Bad number of streams requested: %d",
1278                streamList->num_streams);
1279        return BAD_VALUE;
1280    }
1281
1282    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1283        LOGE("Maximum number of streams %d exceeded: %d",
1284                MAX_NUM_STREAMS, streamList->num_streams);
1285        return BAD_VALUE;
1286    }
1287
1288    mOpMode = streamList->operation_mode;
1289    LOGD("mOpMode: %d", mOpMode);
1290
1291    /* first invalidate all the steams in the mStreamList
1292     * if they appear again, they will be validated */
1293    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1294            it != mStreamInfo.end(); it++) {
1295        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1296        channel->stop();
1297        (*it)->status = INVALID;
1298    }
1299
1300    if (mRawDumpChannel) {
1301        mRawDumpChannel->stop();
1302        delete mRawDumpChannel;
1303        mRawDumpChannel = NULL;
1304    }
1305
1306    if (mSupportChannel)
1307        mSupportChannel->stop();
1308
1309    if (mAnalysisChannel) {
1310        mAnalysisChannel->stop();
1311    }
1312    if (mMetadataChannel) {
1313        /* If content of mStreamInfo is not 0, there is metadata stream */
1314        mMetadataChannel->stop();
1315    }
1316    if (mChannelHandle) {
1317        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1318                mChannelHandle);
1319        LOGD("stopping channel %d", mChannelHandle);
1320    }
1321
1322    pthread_mutex_lock(&mMutex);
1323
1324    // Check state
1325    switch (mState) {
1326        case INITIALIZED:
1327        case CONFIGURED:
1328        case STARTED:
1329            /* valid state */
1330            break;
1331
1332        case ERROR:
1333            pthread_mutex_unlock(&mMutex);
1334            handleCameraDeviceError();
1335            return -ENODEV;
1336
1337        default:
1338            LOGE("Invalid state %d", mState);
1339            pthread_mutex_unlock(&mMutex);
1340            return -ENODEV;
1341    }
1342
1343    /* Check whether we have video stream */
1344    m_bIs4KVideo = false;
1345    m_bIsVideo = false;
1346    m_bEisSupportedSize = false;
1347    m_bTnrEnabled = false;
1348    bool isZsl = false;
1349    uint32_t videoWidth = 0U;
1350    uint32_t videoHeight = 0U;
1351    size_t rawStreamCnt = 0;
1352    size_t stallStreamCnt = 0;
1353    size_t processedStreamCnt = 0;
1354    // Number of streams on ISP encoder path
1355    size_t numStreamsOnEncoder = 0;
1356    size_t numYuv888OnEncoder = 0;
1357    bool bYuv888OverrideJpeg = false;
1358    cam_dimension_t largeYuv888Size = {0, 0};
1359    cam_dimension_t maxViewfinderSize = {0, 0};
1360    bool bJpegExceeds4K = false;
1361    bool bUseCommonFeatureMask = false;
1362    uint32_t commonFeatureMask = 0;
1363    bool bSmallJpegSize = false;
1364    uint32_t width_ratio;
1365    uint32_t height_ratio;
1366    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1367    camera3_stream_t *inputStream = NULL;
1368    bool isJpeg = false;
1369    cam_dimension_t jpegSize = {0, 0};
1370
1371    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1372
1373    /*EIS configuration*/
1374    bool eisSupported = false;
1375    bool oisSupported = false;
1376    int32_t margin_index = -1;
1377    uint8_t eis_prop_set;
1378    uint32_t maxEisWidth = 0;
1379    uint32_t maxEisHeight = 0;
1380
1381    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1382
1383    size_t count = IS_TYPE_MAX;
1384    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1385    for (size_t i = 0; i < count; i++) {
1386        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1387            eisSupported = true;
1388            margin_index = (int32_t)i;
1389            break;
1390        }
1391    }
1392
1393    count = CAM_OPT_STAB_MAX;
1394    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1395    for (size_t i = 0; i < count; i++) {
1396        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1397            oisSupported = true;
1398            break;
1399        }
1400    }
1401
1402    if (eisSupported) {
1403        maxEisWidth = MAX_EIS_WIDTH;
1404        maxEisHeight = MAX_EIS_HEIGHT;
1405    }
1406
1407    /* EIS setprop control */
1408    char eis_prop[PROPERTY_VALUE_MAX];
1409    memset(eis_prop, 0, sizeof(eis_prop));
1410    property_get("persist.camera.eis.enable", eis_prop, "0");
1411    eis_prop_set = (uint8_t)atoi(eis_prop);
1412
1413    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1414            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1415
1416    /* stream configurations */
1417    for (size_t i = 0; i < streamList->num_streams; i++) {
1418        camera3_stream_t *newStream = streamList->streams[i];
1419        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1420                "height = %d, rotation = %d, usage = 0x%x",
1421                 i, newStream->stream_type, newStream->format,
1422                newStream->width, newStream->height, newStream->rotation,
1423                newStream->usage);
1424        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1425                newStream->stream_type == CAMERA3_STREAM_INPUT){
1426            isZsl = true;
1427        }
1428        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1429            inputStream = newStream;
1430        }
1431
1432        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1433            isJpeg = true;
1434            jpegSize.width = newStream->width;
1435            jpegSize.height = newStream->height;
1436            if (newStream->width > VIDEO_4K_WIDTH ||
1437                    newStream->height > VIDEO_4K_HEIGHT)
1438                bJpegExceeds4K = true;
1439        }
1440
1441        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1442                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1443            m_bIsVideo = true;
1444            videoWidth = newStream->width;
1445            videoHeight = newStream->height;
1446            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1447                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1448                m_bIs4KVideo = true;
1449            }
1450            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1451                                  (newStream->height <= maxEisHeight);
1452        }
1453        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1454                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1455            switch (newStream->format) {
1456            case HAL_PIXEL_FORMAT_BLOB:
1457                stallStreamCnt++;
1458                if (isOnEncoder(maxViewfinderSize, newStream->width,
1459                        newStream->height)) {
1460                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1461                    numStreamsOnEncoder++;
1462                }
1463                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1464                        newStream->width);
1465                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1466                        newStream->height);;
1467                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1468                        "FATAL: max_downscale_factor cannot be zero and so assert");
1469                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1470                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1471                    LOGH("Setting small jpeg size flag to true");
1472                    bSmallJpegSize = true;
1473                }
1474                break;
1475            case HAL_PIXEL_FORMAT_RAW10:
1476            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477            case HAL_PIXEL_FORMAT_RAW16:
1478                rawStreamCnt++;
1479                break;
1480            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1481                processedStreamCnt++;
1482                if (isOnEncoder(maxViewfinderSize, newStream->width,
1483                        newStream->height)) {
1484                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1485                            IS_USAGE_ZSL(newStream->usage)) {
1486                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1487                    } else {
1488                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1489                    }
1490                    numStreamsOnEncoder++;
1491                }
1492                break;
1493            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1494                processedStreamCnt++;
1495                if (isOnEncoder(maxViewfinderSize, newStream->width,
1496                        newStream->height)) {
1497                    // If Yuv888 size is not greater than 4K, set feature mask
1498                    // to SUPERSET so that it support concurrent request on
1499                    // YUV and JPEG.
1500                    if (newStream->width <= VIDEO_4K_WIDTH &&
1501                            newStream->height <= VIDEO_4K_HEIGHT) {
1502                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1503                    } else {
1504                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1505                    }
1506                    numStreamsOnEncoder++;
1507                    numYuv888OnEncoder++;
1508                    largeYuv888Size.width = newStream->width;
1509                    largeYuv888Size.height = newStream->height;
1510                }
1511                break;
1512            default:
1513                processedStreamCnt++;
1514                if (isOnEncoder(maxViewfinderSize, newStream->width,
1515                        newStream->height)) {
1516                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1517                    numStreamsOnEncoder++;
1518                }
1519                break;
1520            }
1521
1522        }
1523    }
1524
1525    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1526        !m_bIsVideo) {
1527        m_bEisEnable = false;
1528    }
1529
1530    /* Logic to enable/disable TNR based on specific config size/etc.*/
1531    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1532            ((videoWidth == 1920 && videoHeight == 1080) ||
1533            (videoWidth == 1280 && videoHeight == 720)) &&
1534            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1535        m_bTnrEnabled = true;
1536
1537    /* Check if num_streams is sane */
1538    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1539            rawStreamCnt > MAX_RAW_STREAMS ||
1540            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1541        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1542                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1543        pthread_mutex_unlock(&mMutex);
1544        return -EINVAL;
1545    }
1546    /* Check whether we have zsl stream or 4k video case */
1547    if (isZsl && m_bIsVideo) {
1548        LOGE("Currently invalid configuration ZSL&Video!");
1549        pthread_mutex_unlock(&mMutex);
1550        return -EINVAL;
1551    }
1552    /* Check if stream sizes are sane */
1553    if (numStreamsOnEncoder > 2) {
1554        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1555        pthread_mutex_unlock(&mMutex);
1556        return -EINVAL;
1557    } else if (1 < numStreamsOnEncoder){
1558        bUseCommonFeatureMask = true;
1559        LOGH("Multiple streams above max viewfinder size, common mask needed");
1560    }
1561
1562    /* Check if BLOB size is greater than 4k in 4k recording case */
1563    if (m_bIs4KVideo && bJpegExceeds4K) {
1564        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1565        pthread_mutex_unlock(&mMutex);
1566        return -EINVAL;
1567    }
1568
1569    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1570    // the YUV stream's size is greater or equal to the JPEG size, set common
1571    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1572    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1573            jpegSize.width, jpegSize.height) &&
1574            largeYuv888Size.width > jpegSize.width &&
1575            largeYuv888Size.height > jpegSize.height) {
1576        bYuv888OverrideJpeg = true;
1577    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1578        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1579    }
1580
1581    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %x",
1582            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1583            commonFeatureMask);
1584    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1585            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1586
1587    rc = validateStreamDimensions(streamList);
1588    if (rc == NO_ERROR) {
1589        rc = validateStreamRotations(streamList);
1590    }
1591    if (rc != NO_ERROR) {
1592        LOGE("Invalid stream configuration requested!");
1593        pthread_mutex_unlock(&mMutex);
1594        return rc;
1595    }
1596
1597    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1598    camera3_stream_t *jpegStream = NULL;
1599    for (size_t i = 0; i < streamList->num_streams; i++) {
1600        camera3_stream_t *newStream = streamList->streams[i];
1601        LOGH("newStream type = %d, stream format = %d "
1602                "stream size : %d x %d, stream rotation = %d",
1603                 newStream->stream_type, newStream->format,
1604                newStream->width, newStream->height, newStream->rotation);
1605        //if the stream is in the mStreamList validate it
1606        bool stream_exists = false;
1607        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1608                it != mStreamInfo.end(); it++) {
1609            if ((*it)->stream == newStream) {
1610                QCamera3ProcessingChannel *channel =
1611                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1612                stream_exists = true;
1613                if (channel)
1614                    delete channel;
1615                (*it)->status = VALID;
1616                (*it)->stream->priv = NULL;
1617                (*it)->channel = NULL;
1618            }
1619        }
1620        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1621            //new stream
1622            stream_info_t* stream_info;
1623            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1624            if (!stream_info) {
1625               LOGE("Could not allocate stream info");
1626               rc = -ENOMEM;
1627               pthread_mutex_unlock(&mMutex);
1628               return rc;
1629            }
1630            stream_info->stream = newStream;
1631            stream_info->status = VALID;
1632            stream_info->channel = NULL;
1633            mStreamInfo.push_back(stream_info);
1634        }
1635        /* Covers Opaque ZSL and API1 F/W ZSL */
1636        if (IS_USAGE_ZSL(newStream->usage)
1637                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1638            if (zslStream != NULL) {
1639                LOGE("Multiple input/reprocess streams requested!");
1640                pthread_mutex_unlock(&mMutex);
1641                return BAD_VALUE;
1642            }
1643            zslStream = newStream;
1644        }
1645        /* Covers YUV reprocess */
1646        if (inputStream != NULL) {
1647            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1648                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1649                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1650                    && inputStream->width == newStream->width
1651                    && inputStream->height == newStream->height) {
1652                if (zslStream != NULL) {
1653                    /* This scenario indicates multiple YUV streams with same size
1654                     * as input stream have been requested, since zsl stream handle
1655                     * is solely use for the purpose of overriding the size of streams
1656                     * which share h/w streams we will just make a guess here as to
1657                     * which of the stream is a ZSL stream, this will be refactored
1658                     * once we make generic logic for streams sharing encoder output
1659                     */
1660                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1661                }
1662                zslStream = newStream;
1663            }
1664        }
1665        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1666            jpegStream = newStream;
1667        }
1668    }
1669
1670    /* If a zsl stream is set, we know that we have configured at least one input or
1671       bidirectional stream */
1672    if (NULL != zslStream) {
1673        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1674        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1675        mInputStreamInfo.format = zslStream->format;
1676        mInputStreamInfo.usage = zslStream->usage;
1677        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1678                 mInputStreamInfo.dim.width,
1679                mInputStreamInfo.dim.height,
1680                mInputStreamInfo.format, mInputStreamInfo.usage);
1681    }
1682
1683    cleanAndSortStreamInfo();
1684    if (mMetadataChannel) {
1685        delete mMetadataChannel;
1686        mMetadataChannel = NULL;
1687    }
1688    if (mSupportChannel) {
1689        delete mSupportChannel;
1690        mSupportChannel = NULL;
1691    }
1692
1693    if (mAnalysisChannel) {
1694        delete mAnalysisChannel;
1695        mAnalysisChannel = NULL;
1696    }
1697
1698    if (mDummyBatchChannel) {
1699        delete mDummyBatchChannel;
1700        mDummyBatchChannel = NULL;
1701    }
1702
1703    //Create metadata channel and initialize it
1704    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1705                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1706                    &padding_info, CAM_QCOM_FEATURE_NONE, this);
1707    if (mMetadataChannel == NULL) {
1708        LOGE("failed to allocate metadata channel");
1709        rc = -ENOMEM;
1710        pthread_mutex_unlock(&mMutex);
1711        return rc;
1712    }
1713    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1714    if (rc < 0) {
1715        LOGE("metadata channel initialization failed");
1716        delete mMetadataChannel;
1717        mMetadataChannel = NULL;
1718        pthread_mutex_unlock(&mMutex);
1719        return rc;
1720    }
1721
1722    // Create analysis stream all the time, even when h/w support is not available
1723    {
1724        mAnalysisChannel = new QCamera3SupportChannel(
1725                mCameraHandle->camera_handle,
1726                mChannelHandle,
1727                mCameraHandle->ops,
1728                &gCamCapability[mCameraId]->analysis_padding_info,
1729                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1730                CAM_STREAM_TYPE_ANALYSIS,
1731                &gCamCapability[mCameraId]->analysis_recommended_res,
1732                (gCamCapability[mCameraId]->analysis_recommended_format
1733                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1734                : CAM_FORMAT_YUV_420_NV21),
1735                gCamCapability[mCameraId]->hw_analysis_supported,
1736                this,
1737                0); // force buffer count to 0
1738        if (!mAnalysisChannel) {
1739            LOGE("H/W Analysis channel cannot be created");
1740            pthread_mutex_unlock(&mMutex);
1741            return -ENOMEM;
1742        }
1743    }
1744
1745    bool isRawStreamRequested = false;
1746    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1747    /* Allocate channel objects for the requested streams */
1748    for (size_t i = 0; i < streamList->num_streams; i++) {
1749        camera3_stream_t *newStream = streamList->streams[i];
1750        uint32_t stream_usage = newStream->usage;
1751        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1752        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1753        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1754                || IS_USAGE_ZSL(newStream->usage)) &&
1755            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1756            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1757            if (bUseCommonFeatureMask) {
1758                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1759                        commonFeatureMask;
1760            } else {
1761                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1762                        CAM_QCOM_FEATURE_NONE;
1763            }
1764
1765        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1766                LOGH("Input stream configured, reprocess config");
1767        } else {
1768            //for non zsl streams find out the format
1769            switch (newStream->format) {
1770            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1771            {
1772                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1773                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1774                /* add additional features to pp feature mask */
1775                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1776                        mStreamConfigInfo.num_streams);
1777
1778                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1779                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1780                                CAM_STREAM_TYPE_VIDEO;
1781                    if (m_bTnrEnabled && m_bTnrVideo) {
1782                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1783                            CAM_QCOM_FEATURE_CPP_TNR;
1784                    }
1785                } else {
1786                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1787                            CAM_STREAM_TYPE_PREVIEW;
1788                    if (m_bTnrEnabled && m_bTnrPreview) {
1789                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1790                                CAM_QCOM_FEATURE_CPP_TNR;
1791                    }
1792                    padding_info.width_padding = mSurfaceStridePadding;
1793                    padding_info.height_padding = CAM_PAD_TO_2;
1794                }
1795                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1796                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1797                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1798                            newStream->height;
1799                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1800                            newStream->width;
1801                }
1802            }
1803            break;
1804            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1805                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1806                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1807                    if (bUseCommonFeatureMask)
1808                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1809                                commonFeatureMask;
1810                    else
1811                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1812                                CAM_QCOM_FEATURE_NONE;
1813                } else {
1814                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1815                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1816                }
1817            break;
1818            case HAL_PIXEL_FORMAT_BLOB:
1819                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1820                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1821                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1822                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1823                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1824                } else {
1825                    if (bUseCommonFeatureMask &&
1826                            isOnEncoder(maxViewfinderSize, newStream->width,
1827                            newStream->height)) {
1828                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1829                    } else {
1830                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1831                    }
1832                }
1833                if (isZsl) {
1834                    if (zslStream) {
1835                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1836                                (int32_t)zslStream->width;
1837                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1838                                (int32_t)zslStream->height;
1839                    } else {
1840                        LOGE("Error, No ZSL stream identified");
1841                        pthread_mutex_unlock(&mMutex);
1842                        return -EINVAL;
1843                    }
1844                } else if (m_bIs4KVideo) {
1845                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1846                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1847                } else if (bYuv888OverrideJpeg) {
1848                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1849                            (int32_t)largeYuv888Size.width;
1850                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1851                            (int32_t)largeYuv888Size.height;
1852                }
1853                break;
1854            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1855            case HAL_PIXEL_FORMAT_RAW16:
1856            case HAL_PIXEL_FORMAT_RAW10:
1857                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1858                isRawStreamRequested = true;
1859                break;
1860            default:
1861                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1862                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1863                break;
1864            }
1865        }
1866
1867        if (newStream->priv == NULL) {
1868            //New stream, construct channel
1869            switch (newStream->stream_type) {
1870            case CAMERA3_STREAM_INPUT:
1871                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1872                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1873                break;
1874            case CAMERA3_STREAM_BIDIRECTIONAL:
1875                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1876                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1877                break;
1878            case CAMERA3_STREAM_OUTPUT:
1879                /* For video encoding stream, set read/write rarely
1880                 * flag so that they may be set to un-cached */
1881                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1882                    newStream->usage |=
1883                         (GRALLOC_USAGE_SW_READ_RARELY |
1884                         GRALLOC_USAGE_SW_WRITE_RARELY |
1885                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1886                else if (IS_USAGE_ZSL(newStream->usage))
1887                {
1888                    LOGD("ZSL usage flag skipping");
1889                }
1890                else if (newStream == zslStream
1891                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1892                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1893                } else
1894                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1895                break;
1896            default:
1897                LOGE("Invalid stream_type %d", newStream->stream_type);
1898                break;
1899            }
1900
1901            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1902                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1903                QCamera3ProcessingChannel *channel = NULL;
1904                switch (newStream->format) {
1905                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1906                    if ((newStream->usage &
1907                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1908                            (streamList->operation_mode ==
1909                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1910                    ) {
1911                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1912                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1913                                &gCamCapability[mCameraId]->padding_info,
1914                                this,
1915                                newStream,
1916                                (cam_stream_type_t)
1917                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1918                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1919                                mMetadataChannel,
1920                                0); //heap buffers are not required for HFR video channel
1921                        if (channel == NULL) {
1922                            LOGE("allocation of channel failed");
1923                            pthread_mutex_unlock(&mMutex);
1924                            return -ENOMEM;
1925                        }
1926                        //channel->getNumBuffers() will return 0 here so use
1927                        //MAX_INFLIGH_HFR_REQUESTS
1928                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1929                        newStream->priv = channel;
1930                        LOGI("num video buffers in HFR mode: %d",
1931                                 MAX_INFLIGHT_HFR_REQUESTS);
1932                    } else {
1933                        /* Copy stream contents in HFR preview only case to create
1934                         * dummy batch channel so that sensor streaming is in
1935                         * HFR mode */
1936                        if (!m_bIsVideo && (streamList->operation_mode ==
1937                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1938                            mDummyBatchStream = *newStream;
1939                        }
1940                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1941                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1942                                &gCamCapability[mCameraId]->padding_info,
1943                                this,
1944                                newStream,
1945                                (cam_stream_type_t)
1946                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1947                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1948                                mMetadataChannel,
1949                                MAX_INFLIGHT_REQUESTS);
1950                        if (channel == NULL) {
1951                            LOGE("allocation of channel failed");
1952                            pthread_mutex_unlock(&mMutex);
1953                            return -ENOMEM;
1954                        }
1955                        newStream->max_buffers = channel->getNumBuffers();
1956                        newStream->priv = channel;
1957                    }
1958                    break;
1959                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1960                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1961                            mChannelHandle,
1962                            mCameraHandle->ops, captureResultCb,
1963                            &padding_info,
1964                            this,
1965                            newStream,
1966                            (cam_stream_type_t)
1967                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1968                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1969                            mMetadataChannel);
1970                    if (channel == NULL) {
1971                        LOGE("allocation of YUV channel failed");
1972                        pthread_mutex_unlock(&mMutex);
1973                        return -ENOMEM;
1974                    }
1975                    newStream->max_buffers = channel->getNumBuffers();
1976                    newStream->priv = channel;
1977                    break;
1978                }
1979                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1980                case HAL_PIXEL_FORMAT_RAW16:
1981                case HAL_PIXEL_FORMAT_RAW10:
1982                    mRawChannel = new QCamera3RawChannel(
1983                            mCameraHandle->camera_handle, mChannelHandle,
1984                            mCameraHandle->ops, captureResultCb,
1985                            &padding_info,
1986                            this, newStream, CAM_QCOM_FEATURE_NONE,
1987                            mMetadataChannel,
1988                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1989                    if (mRawChannel == NULL) {
1990                        LOGE("allocation of raw channel failed");
1991                        pthread_mutex_unlock(&mMutex);
1992                        return -ENOMEM;
1993                    }
1994                    newStream->max_buffers = mRawChannel->getNumBuffers();
1995                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1996                    break;
1997                case HAL_PIXEL_FORMAT_BLOB:
1998                    // Max live snapshot inflight buffer is 1. This is to mitigate
1999                    // frame drop issues for video snapshot. The more buffers being
2000                    // allocated, the more frame drops there are.
2001                    mPictureChannel = new QCamera3PicChannel(
2002                            mCameraHandle->camera_handle, mChannelHandle,
2003                            mCameraHandle->ops, captureResultCb,
2004                            &padding_info, this, newStream,
2005                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2006                            m_bIs4KVideo, isZsl, mMetadataChannel,
2007                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2008                    if (mPictureChannel == NULL) {
2009                        LOGE("allocation of channel failed");
2010                        pthread_mutex_unlock(&mMutex);
2011                        return -ENOMEM;
2012                    }
2013                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2014                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2015                    mPictureChannel->overrideYuvSize(
2016                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2017                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2018                    break;
2019
2020                default:
2021                    LOGE("not a supported format 0x%x", newStream->format);
2022                    break;
2023                }
2024            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2025                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2026            } else {
2027                LOGE("Error, Unknown stream type");
2028                pthread_mutex_unlock(&mMutex);
2029                return -EINVAL;
2030            }
2031
2032            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2033            if (channel != NULL && channel->isUBWCEnabled()) {
2034                cam_format_t fmt = channel->getStreamDefaultFormat(
2035                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2036                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2037                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2038                }
2039            }
2040
2041            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2042                    it != mStreamInfo.end(); it++) {
2043                if ((*it)->stream == newStream) {
2044                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2045                    break;
2046                }
2047            }
2048        } else {
2049            // Channel already exists for this stream
2050            // Do nothing for now
2051        }
2052        padding_info = gCamCapability[mCameraId]->padding_info;
2053
2054        /* Do not add entries for input stream in metastream info
2055         * since there is no real stream associated with it
2056         */
2057        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2058            mStreamConfigInfo.num_streams++;
2059    }
2060
2061    //RAW DUMP channel
2062    if (mEnableRawDump && isRawStreamRequested == false){
2063        cam_dimension_t rawDumpSize;
2064        rawDumpSize = getMaxRawSize(mCameraId);
2065        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2066                                  mChannelHandle,
2067                                  mCameraHandle->ops,
2068                                  rawDumpSize,
2069                                  &padding_info,
2070                                  this, CAM_QCOM_FEATURE_NONE);
2071        if (!mRawDumpChannel) {
2072            LOGE("Raw Dump channel cannot be created");
2073            pthread_mutex_unlock(&mMutex);
2074            return -ENOMEM;
2075        }
2076    }
2077
2078
2079    if (mAnalysisChannel) {
2080        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2081                gCamCapability[mCameraId]->analysis_recommended_res;
2082        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2083                CAM_STREAM_TYPE_ANALYSIS;
2084        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2085                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2086        mStreamConfigInfo.num_streams++;
2087    }
2088
2089    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2090        mSupportChannel = new QCamera3SupportChannel(
2091                mCameraHandle->camera_handle,
2092                mChannelHandle,
2093                mCameraHandle->ops,
2094                &gCamCapability[mCameraId]->padding_info,
2095                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2096                CAM_STREAM_TYPE_CALLBACK,
2097                &QCamera3SupportChannel::kDim,
2098                CAM_FORMAT_YUV_420_NV21,
2099                gCamCapability[mCameraId]->hw_analysis_supported,
2100                this);
2101        if (!mSupportChannel) {
2102            LOGE("dummy channel cannot be created");
2103            pthread_mutex_unlock(&mMutex);
2104            return -ENOMEM;
2105        }
2106    }
2107
2108    if (mSupportChannel) {
2109        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2110                QCamera3SupportChannel::kDim;
2111        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2112                CAM_STREAM_TYPE_CALLBACK;
2113        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2114                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2115        mStreamConfigInfo.num_streams++;
2116    }
2117
2118    if (mRawDumpChannel) {
2119        cam_dimension_t rawSize;
2120        rawSize = getMaxRawSize(mCameraId);
2121        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2122                rawSize;
2123        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2124                CAM_STREAM_TYPE_RAW;
2125        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2126                CAM_QCOM_FEATURE_NONE;
2127        mStreamConfigInfo.num_streams++;
2128    }
2129    /* In HFR mode, if video stream is not added, create a dummy channel so that
2130     * ISP can create a batch mode even for preview only case. This channel is
2131     * never 'start'ed (no stream-on), it is only 'initialized'  */
2132    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2133            !m_bIsVideo) {
2134        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2135                mChannelHandle,
2136                mCameraHandle->ops, captureResultCb,
2137                &gCamCapability[mCameraId]->padding_info,
2138                this,
2139                &mDummyBatchStream,
2140                CAM_STREAM_TYPE_VIDEO,
2141                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2142                mMetadataChannel);
2143        if (NULL == mDummyBatchChannel) {
2144            LOGE("creation of mDummyBatchChannel failed."
2145                    "Preview will use non-hfr sensor mode ");
2146        }
2147    }
2148    if (mDummyBatchChannel) {
2149        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2150                mDummyBatchStream.width;
2151        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2152                mDummyBatchStream.height;
2153        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2154                CAM_STREAM_TYPE_VIDEO;
2155        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2156                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2157        mStreamConfigInfo.num_streams++;
2158    }
2159
2160    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2161    mStreamConfigInfo.buffer_info.max_buffers =
2162            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2163
2164    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2165    for (pendingRequestIterator i = mPendingRequestsList.begin();
2166            i != mPendingRequestsList.end();) {
2167        i = erasePendingRequest(i);
2168    }
2169    mPendingFrameDropList.clear();
2170    // Initialize/Reset the pending buffers list
2171    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2172        req.mPendingBufferList.clear();
2173    }
2174    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2175
2176    mPendingReprocessResultList.clear();
2177
2178    mCurJpegMeta.clear();
2179    //Get min frame duration for this streams configuration
2180    deriveMinFrameDuration();
2181
2182    // Update state
2183    mState = CONFIGURED;
2184
2185    pthread_mutex_unlock(&mMutex);
2186
2187    return rc;
2188}
2189
2190/*===========================================================================
2191 * FUNCTION   : validateCaptureRequest
2192 *
2193 * DESCRIPTION: validate a capture request from camera service
2194 *
2195 * PARAMETERS :
2196 *   @request : request from framework to process
2197 *
2198 * RETURN     :
2199 *
2200 *==========================================================================*/
2201int QCamera3HardwareInterface::validateCaptureRequest(
2202                    camera3_capture_request_t *request)
2203{
2204    ssize_t idx = 0;
2205    const camera3_stream_buffer_t *b;
2206    CameraMetadata meta;
2207
2208    /* Sanity check the request */
2209    if (request == NULL) {
2210        LOGE("NULL capture request");
2211        return BAD_VALUE;
2212    }
2213
2214    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2215        /*settings cannot be null for the first request*/
2216        return BAD_VALUE;
2217    }
2218
2219    uint32_t frameNumber = request->frame_number;
2220    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2221        LOGE("Request %d: No output buffers provided!",
2222                __FUNCTION__, frameNumber);
2223        return BAD_VALUE;
2224    }
2225    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2226        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2227                 request->num_output_buffers, MAX_NUM_STREAMS);
2228        return BAD_VALUE;
2229    }
2230    if (request->input_buffer != NULL) {
2231        b = request->input_buffer;
2232        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2233            LOGE("Request %d: Buffer %ld: Status not OK!",
2234                     frameNumber, (long)idx);
2235            return BAD_VALUE;
2236        }
2237        if (b->release_fence != -1) {
2238            LOGE("Request %d: Buffer %ld: Has a release fence!",
2239                     frameNumber, (long)idx);
2240            return BAD_VALUE;
2241        }
2242        if (b->buffer == NULL) {
2243            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2244                     frameNumber, (long)idx);
2245            return BAD_VALUE;
2246        }
2247    }
2248
2249    // Validate all buffers
2250    b = request->output_buffers;
2251    do {
2252        QCamera3ProcessingChannel *channel =
2253                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2254        if (channel == NULL) {
2255            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2256                     frameNumber, (long)idx);
2257            return BAD_VALUE;
2258        }
2259        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2260            LOGE("Request %d: Buffer %ld: Status not OK!",
2261                     frameNumber, (long)idx);
2262            return BAD_VALUE;
2263        }
2264        if (b->release_fence != -1) {
2265            LOGE("Request %d: Buffer %ld: Has a release fence!",
2266                     frameNumber, (long)idx);
2267            return BAD_VALUE;
2268        }
2269        if (b->buffer == NULL) {
2270            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2271                     frameNumber, (long)idx);
2272            return BAD_VALUE;
2273        }
2274        if (*(b->buffer) == NULL) {
2275            LOGE("Request %d: Buffer %ld: NULL private handle!",
2276                     frameNumber, (long)idx);
2277            return BAD_VALUE;
2278        }
2279        idx++;
2280        b = request->output_buffers + idx;
2281    } while (idx < (ssize_t)request->num_output_buffers);
2282
2283    return NO_ERROR;
2284}
2285
2286/*===========================================================================
2287 * FUNCTION   : deriveMinFrameDuration
2288 *
2289 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2290 *              on currently configured streams.
2291 *
2292 * PARAMETERS : NONE
2293 *
2294 * RETURN     : NONE
2295 *
2296 *==========================================================================*/
2297void QCamera3HardwareInterface::deriveMinFrameDuration()
2298{
2299    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2300
2301    maxJpegDim = 0;
2302    maxProcessedDim = 0;
2303    maxRawDim = 0;
2304
2305    // Figure out maximum jpeg, processed, and raw dimensions
2306    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2307        it != mStreamInfo.end(); it++) {
2308
2309        // Input stream doesn't have valid stream_type
2310        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2311            continue;
2312
2313        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2314        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2315            if (dimension > maxJpegDim)
2316                maxJpegDim = dimension;
2317        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2318                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2319                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2320            if (dimension > maxRawDim)
2321                maxRawDim = dimension;
2322        } else {
2323            if (dimension > maxProcessedDim)
2324                maxProcessedDim = dimension;
2325        }
2326    }
2327
2328    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2329            MAX_SIZES_CNT);
2330
2331    //Assume all jpeg dimensions are in processed dimensions.
2332    if (maxJpegDim > maxProcessedDim)
2333        maxProcessedDim = maxJpegDim;
2334    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2335    if (maxProcessedDim > maxRawDim) {
2336        maxRawDim = INT32_MAX;
2337
2338        for (size_t i = 0; i < count; i++) {
2339            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2340                    gCamCapability[mCameraId]->raw_dim[i].height;
2341            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2342                maxRawDim = dimension;
2343        }
2344    }
2345
2346    //Find minimum durations for processed, jpeg, and raw
2347    for (size_t i = 0; i < count; i++) {
2348        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2349                gCamCapability[mCameraId]->raw_dim[i].height) {
2350            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2351            break;
2352        }
2353    }
2354    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2355    for (size_t i = 0; i < count; i++) {
2356        if (maxProcessedDim ==
2357                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2358                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2359            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2360            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2361            break;
2362        }
2363    }
2364}
2365
2366/*===========================================================================
2367 * FUNCTION   : getMinFrameDuration
2368 *
2369 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2370 *              and current request configuration.
2371 *
2372 * PARAMETERS : @request: requset sent by the frameworks
2373 *
2374 * RETURN     : min farme duration for a particular request
2375 *
2376 *==========================================================================*/
2377int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2378{
2379    bool hasJpegStream = false;
2380    bool hasRawStream = false;
2381    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2382        const camera3_stream_t *stream = request->output_buffers[i].stream;
2383        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2384            hasJpegStream = true;
2385        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2386                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2387                stream->format == HAL_PIXEL_FORMAT_RAW16)
2388            hasRawStream = true;
2389    }
2390
2391    if (!hasJpegStream)
2392        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2393    else
2394        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2395}
2396
2397/*===========================================================================
2398 * FUNCTION   : handleBuffersDuringFlushLock
2399 *
2400 * DESCRIPTION: Account for buffers returned from back-end during flush
2401 *              This function is executed while mMutex is held by the caller.
2402 *
2403 * PARAMETERS :
2404 *   @buffer: image buffer for the callback
2405 *
2406 * RETURN     :
2407 *==========================================================================*/
2408void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2409{
2410    bool buffer_found = false;
2411    for (List<PendingBuffersInRequest>::iterator req =
2412            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2413            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2414        for (List<PendingBufferInfo>::iterator i =
2415                req->mPendingBufferList.begin();
2416                i != req->mPendingBufferList.end(); i++) {
2417            if (i->buffer == buffer->buffer) {
2418                mPendingBuffersMap.numPendingBufsAtFlush--;
2419                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2420                    buffer->buffer, req->frame_number,
2421                    mPendingBuffersMap.numPendingBufsAtFlush);
2422                buffer_found = true;
2423                break;
2424            }
2425        }
2426        if (buffer_found) {
2427            break;
2428        }
2429    }
2430    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2431        //signal the flush()
2432        LOGD("All buffers returned to HAL. Continue flush");
2433        pthread_cond_signal(&mBuffersCond);
2434    }
2435}
2436
2437
2438/*===========================================================================
2439 * FUNCTION   : handlePendingReprocResults
2440 *
2441 * DESCRIPTION: check and notify on any pending reprocess results
2442 *
2443 * PARAMETERS :
2444 *   @frame_number   : Pending request frame number
2445 *
2446 * RETURN     : int32_t type of status
2447 *              NO_ERROR  -- success
2448 *              none-zero failure code
2449 *==========================================================================*/
2450int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
2451{
2452    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
2453            j != mPendingReprocessResultList.end(); j++) {
2454        if (j->frame_number == frame_number) {
2455            mCallbackOps->notify(mCallbackOps, &j->notify_msg);
2456
2457            LOGD("Delayed reprocess notify %d",
2458                    frame_number);
2459
2460            for (pendingRequestIterator k = mPendingRequestsList.begin();
2461                    k != mPendingRequestsList.end(); k++) {
2462
2463                if (k->frame_number == j->frame_number) {
2464                    LOGD("Found reprocess frame number %d in pending reprocess List "
2465                            "Take it out!!",
2466                            k->frame_number);
2467
2468                    camera3_capture_result result;
2469                    memset(&result, 0, sizeof(camera3_capture_result));
2470                    result.frame_number = frame_number;
2471                    result.num_output_buffers = 1;
2472                    result.output_buffers =  &j->buffer;
2473                    result.input_buffer = k->input_buffer;
2474                    result.result = k->settings;
2475                    result.partial_result = PARTIAL_RESULT_COUNT;
2476                    mCallbackOps->process_capture_result(mCallbackOps, &result);
2477
2478                    erasePendingRequest(k);
2479                    break;
2480                }
2481            }
2482            mPendingReprocessResultList.erase(j);
2483            break;
2484        }
2485    }
2486    return NO_ERROR;
2487}
2488
2489/*===========================================================================
2490 * FUNCTION   : handleBatchMetadata
2491 *
2492 * DESCRIPTION: Handles metadata buffer callback in batch mode
2493 *
2494 * PARAMETERS : @metadata_buf: metadata buffer
2495 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2496 *                 the meta buf in this method
2497 *
2498 * RETURN     :
2499 *
2500 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    // Number of per-frame results to synthesize from this one batch metadata.
    size_t loopCount = 1;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // If any required entry is missing, still fall through to the dispatch
    // loop below (invalid metadata is forwarded for pipeline depth
    // calculation), but skip all frame-number/timestamp interpolation.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    // mPendingBatchMap (last frame number -> first frame number of the batch)
    // is read and updated under mMutex.
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is fully accounted for; remove its map entry.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Iterate enough times to cover both the urgent and regular ranges.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    // Each iteration rewrites the shared metadata buffer in place with the
    // interpolated frame number / timestamp for one frame of the batch, then
    // dispatches it through the normal per-frame path.
    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer timestamp: frames within the batch are assumed to be
                // evenly spaced at the HFR video rate (mHFRVideoFps).
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // Always pass false here: this function owns the buffer's release and
        // performs it once, after all iterations, below.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2649
2650/*===========================================================================
2651 * FUNCTION   : handleMetadataWithLock
2652 *
2653 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2654 *
2655 * PARAMETERS : @metadata_buf: metadata buffer
2656 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2657 *                 the meta buf in this method
2658 *
2659 * RETURN     :
2660 *
2661 *==========================================================================*/
2662void QCamera3HardwareInterface::handleMetadataWithLock(
2663    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2664{
2665    ATRACE_CALL();
2666    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2667        //during flush do not send metadata from this thread
2668        LOGD("not sending metadata during flush or when mState is error");
2669        if (free_and_bufdone_meta_buf) {
2670            mMetadataChannel->bufDone(metadata_buf);
2671            free(metadata_buf);
2672        }
2673        return;
2674    }
2675
2676    //not in flush
2677    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2678    int32_t frame_number_valid, urgent_frame_number_valid;
2679    uint32_t frame_number, urgent_frame_number;
2680    int64_t capture_time;
2681    nsecs_t currentSysTime;
2682
2683    int32_t *p_frame_number_valid =
2684            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2685    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2686    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2687    int32_t *p_urgent_frame_number_valid =
2688            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2689    uint32_t *p_urgent_frame_number =
2690            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2691    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2692            metadata) {
2693        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2694                 *p_frame_number_valid, *p_frame_number);
2695    }
2696
2697    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2698            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2699        LOGE("Invalid metadata");
2700        if (free_and_bufdone_meta_buf) {
2701            mMetadataChannel->bufDone(metadata_buf);
2702            free(metadata_buf);
2703        }
2704        goto done_metadata;
2705    }
2706    frame_number_valid =        *p_frame_number_valid;
2707    frame_number =              *p_frame_number;
2708    capture_time =              *p_capture_time;
2709    urgent_frame_number_valid = *p_urgent_frame_number_valid;
2710    urgent_frame_number =       *p_urgent_frame_number;
2711    currentSysTime =            systemTime(CLOCK_MONOTONIC);
2712
2713    // Detect if buffers from any requests are overdue
2714    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2715        if ( (currentSysTime - req.timestamp) >
2716            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2717            for (auto &missed : req.mPendingBufferList) {
2718                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
2719                    "stream type = %d, stream format = %d",
2720                    frame_number, req.frame_number, missed.buffer,
2721                    missed.stream->stream_type, missed.stream->format);
2722            }
2723        }
2724    }
2725    //Partial result on process_capture_result for timestamp
2726    if (urgent_frame_number_valid) {
2727        LOGD("valid urgent frame_number = %u, capture_time = %lld",
2728           urgent_frame_number, capture_time);
2729
2730        //Recieved an urgent Frame Number, handle it
2731        //using partial results
2732        for (pendingRequestIterator i =
2733                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2734            LOGD("Iterator Frame = %d urgent frame = %d",
2735                 i->frame_number, urgent_frame_number);
2736
2737            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2738                (i->partial_result_cnt == 0)) {
2739                LOGE("Error: HAL missed urgent metadata for frame number %d",
2740                         i->frame_number);
2741            }
2742
2743            if (i->frame_number == urgent_frame_number &&
2744                     i->bUrgentReceived == 0) {
2745
2746                camera3_capture_result_t result;
2747                memset(&result, 0, sizeof(camera3_capture_result_t));
2748
2749                i->partial_result_cnt++;
2750                i->bUrgentReceived = 1;
2751                // Extract 3A metadata
2752                result.result =
2753                    translateCbUrgentMetadataToResultMetadata(metadata);
2754                // Populate metadata result
2755                result.frame_number = urgent_frame_number;
2756                result.num_output_buffers = 0;
2757                result.output_buffers = NULL;
2758                result.partial_result = i->partial_result_cnt;
2759
2760                mCallbackOps->process_capture_result(mCallbackOps, &result);
2761                LOGD("urgent frame_number = %u, capture_time = %lld",
2762                      result.frame_number, capture_time);
2763                free_camera_metadata((camera_metadata_t *)result.result);
2764                break;
2765            }
2766        }
2767    }
2768
2769    if (!frame_number_valid) {
2770        LOGD("Not a valid normal frame number, used as SOF only");
2771        if (free_and_bufdone_meta_buf) {
2772            mMetadataChannel->bufDone(metadata_buf);
2773            free(metadata_buf);
2774        }
2775        goto done_metadata;
2776    }
2777    LOGH("valid frame_number = %u, capture_time = %lld",
2778            frame_number, capture_time);
2779
2780    for (pendingRequestIterator i = mPendingRequestsList.begin();
2781            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2782        // Flush out all entries with less or equal frame numbers.
2783
2784        camera3_capture_result_t result;
2785        memset(&result, 0, sizeof(camera3_capture_result_t));
2786
2787        LOGD("frame_number in the list is %u", i->frame_number);
2788        i->partial_result_cnt++;
2789        result.partial_result = i->partial_result_cnt;
2790
2791        // Check whether any stream buffer corresponding to this is dropped or not
2792        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2793        // The API does not expect a blob buffer to be dropped
2794        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2795            /* Clear notify_msg structure */
2796            camera3_notify_msg_t notify_msg;
2797            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2798            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2799                    j != i->buffers.end(); j++) {
2800                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2801                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2802                for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2803                    if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2804                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2805                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2806                                __func__, i->frame_number, streamID, j->stream->format);
2807                        notify_msg.type = CAMERA3_MSG_ERROR;
2808                        notify_msg.message.error.frame_number = i->frame_number;
2809                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2810                        notify_msg.message.error.error_stream = j->stream;
2811                        mCallbackOps->notify(mCallbackOps, &notify_msg);
2812                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2813                                __func__, i->frame_number, streamID, j->stream->format);
2814                        PendingFrameDropInfo PendingFrameDrop;
2815                        PendingFrameDrop.frame_number=i->frame_number;
2816                        PendingFrameDrop.stream_ID = streamID;
2817                        // Add the Frame drop info to mPendingFrameDropList
2818                        mPendingFrameDropList.push_back(PendingFrameDrop);
2819                   }
2820               }
2821            }
2822        }
2823
2824        // Send empty metadata with already filled buffers for dropped metadata
2825        // and send valid metadata with already filled buffers for current metadata
2826        /* we could hit this case when we either
2827         * 1. have a pending reprocess request or
2828         * 2. miss a metadata buffer callback */
2829        if (i->frame_number < frame_number) {
2830            if (i->input_buffer) {
2831                /* this will be handled in handleInputBufferWithLock */
2832                i++;
2833                continue;
2834            } else {
2835                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
2836                if (free_and_bufdone_meta_buf) {
2837                    mMetadataChannel->bufDone(metadata_buf);
2838                    free(metadata_buf);
2839                }
2840                mState = ERROR;
2841                goto done_metadata;
2842            }
2843        } else {
2844            mPendingLiveRequest--;
2845            /* Clear notify_msg structure */
2846            camera3_notify_msg_t notify_msg;
2847            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2848
2849            // Send shutter notify to frameworks
2850            notify_msg.type = CAMERA3_MSG_SHUTTER;
2851            notify_msg.message.shutter.frame_number = i->frame_number;
2852            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2853            mCallbackOps->notify(mCallbackOps, &notify_msg);
2854
2855            i->timestamp = capture_time;
2856
2857            // Find channel requiring metadata, meaning internal offline postprocess
2858            // is needed.
2859            //TODO: for now, we don't support two streams requiring metadata at the same time.
2860            // (because we are not making copies, and metadata buffer is not reference counted.
2861            bool internalPproc = false;
2862            for (pendingBufferIterator iter = i->buffers.begin();
2863                    iter != i->buffers.end(); iter++) {
2864                if (iter->need_metadata) {
2865                    internalPproc = true;
2866                    QCamera3ProcessingChannel *channel =
2867                            (QCamera3ProcessingChannel *)iter->stream->priv;
2868                    channel->queueReprocMetadata(metadata_buf);
2869                    break;
2870                }
2871            }
2872
2873            result.result = translateFromHalMetadata(metadata,
2874                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2875                    i->capture_intent, i->hybrid_ae_enable, internalPproc, i->fwkCacMode);
2876
2877            saveExifParams(metadata);
2878
2879            if (i->blob_request) {
2880                {
2881                    //Dump tuning metadata if enabled and available
2882                    char prop[PROPERTY_VALUE_MAX];
2883                    memset(prop, 0, sizeof(prop));
2884                    property_get("persist.camera.dumpmetadata", prop, "0");
2885                    int32_t enabled = atoi(prop);
2886                    if (enabled && metadata->is_tuning_params_valid) {
2887                        dumpMetadataToFile(metadata->tuning_params,
2888                               mMetaFrameCount,
2889                               enabled,
2890                               "Snapshot",
2891                               frame_number);
2892                    }
2893                }
2894            }
2895
2896            if (!internalPproc) {
2897                LOGD("couldn't find need_metadata for this metadata");
2898                // Return metadata buffer
2899                if (free_and_bufdone_meta_buf) {
2900                    mMetadataChannel->bufDone(metadata_buf);
2901                    free(metadata_buf);
2902                }
2903            }
2904        }
2905        if (!result.result) {
2906            LOGE("metadata is NULL");
2907        }
2908        result.frame_number = i->frame_number;
2909        result.input_buffer = i->input_buffer;
2910        result.num_output_buffers = 0;
2911        result.output_buffers = NULL;
2912        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2913                    j != i->buffers.end(); j++) {
2914            if (j->buffer) {
2915                result.num_output_buffers++;
2916            }
2917        }
2918
2919        updateFpsInPreviewBuffer(metadata, i->frame_number);
2920
2921        if (result.num_output_buffers > 0) {
2922            camera3_stream_buffer_t *result_buffers =
2923                new camera3_stream_buffer_t[result.num_output_buffers];
2924            if (result_buffers != NULL) {
2925                size_t result_buffers_idx = 0;
2926                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2927                        j != i->buffers.end(); j++) {
2928                    if (j->buffer) {
2929                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2930                                m != mPendingFrameDropList.end(); m++) {
2931                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2932                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2933                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2934                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2935                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
2936                                        frame_number, streamID);
2937                                m = mPendingFrameDropList.erase(m);
2938                                break;
2939                            }
2940                        }
2941                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
2942                        result_buffers[result_buffers_idx++] = *(j->buffer);
2943                        free(j->buffer);
2944                        j->buffer = NULL;
2945                    }
2946                }
2947                result.output_buffers = result_buffers;
2948                mCallbackOps->process_capture_result(mCallbackOps, &result);
2949                LOGD("meta frame_number = %u, capture_time = %lld",
2950                        result.frame_number, i->timestamp);
2951                free_camera_metadata((camera_metadata_t *)result.result);
2952                delete[] result_buffers;
2953            }else {
2954                LOGE("Fatal error: out of memory");
2955            }
2956        } else {
2957            mCallbackOps->process_capture_result(mCallbackOps, &result);
2958            LOGD("meta frame_number = %u, capture_time = %lld",
2959                    result.frame_number, i->timestamp);
2960            free_camera_metadata((camera_metadata_t *)result.result);
2961        }
2962
2963        i = erasePendingRequest(i);
2964
2965        if (!mPendingReprocessResultList.empty()) {
2966            handlePendingReprocResults(frame_number + 1);
2967        }
2968    }
2969
2970done_metadata:
2971    for (pendingRequestIterator i = mPendingRequestsList.begin();
2972            i != mPendingRequestsList.end() ;i++) {
2973        i->pipeline_depth++;
2974    }
2975    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
2976    unblockRequestIfNecessary();
2977}
2978
2979/*===========================================================================
2980 * FUNCTION   : hdrPlusPerfLock
2981 *
2982 * DESCRIPTION: perf lock for HDR+ using custom intent
2983 *
2984 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2985 *
2986 * RETURN     : None
2987 *
2988 *==========================================================================*/
2989void QCamera3HardwareInterface::hdrPlusPerfLock(
2990        mm_camera_super_buf_t *metadata_buf)
2991{
2992    if (NULL == metadata_buf) {
2993        LOGE("metadata_buf is NULL");
2994        return;
2995    }
2996    metadata_buffer_t *metadata =
2997            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2998    int32_t *p_frame_number_valid =
2999            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3000    uint32_t *p_frame_number =
3001            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3002
3003    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3004        LOGE("%s: Invalid metadata", __func__);
3005        return;
3006    }
3007
3008    //acquire perf lock for 5 sec after the last HDR frame is captured
3009    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3010        if ((p_frame_number != NULL) &&
3011                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3012            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3013        }
3014    }
3015
3016    //release lock after perf lock timer is expired. If lock is already released,
3017    //isTimerReset returns false
3018    if (m_perfLock.isTimerReset()) {
3019        mLastCustIntentFrmNum = -1;
3020        m_perfLock.lock_rel_timed();
3021    }
3022}
3023
3024/*===========================================================================
3025 * FUNCTION   : handleInputBufferWithLock
3026 *
3027 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3028 *
3029 * PARAMETERS : @frame_number: frame number of the input buffer
3030 *
3031 * RETURN     :
3032 *
3033 *==========================================================================*/
3034void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3035{
3036    ATRACE_CALL();
3037    pendingRequestIterator i = mPendingRequestsList.begin();
3038    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3039        i++;
3040    }
3041    if (i != mPendingRequestsList.end() && i->input_buffer) {
3042        //found the right request
3043        if (!i->shutter_notified) {
3044            CameraMetadata settings;
3045            camera3_notify_msg_t notify_msg;
3046            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3047            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3048            if(i->settings) {
3049                settings = i->settings;
3050                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3051                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3052                } else {
3053                    LOGE("No timestamp in input settings! Using current one.");
3054                }
3055            } else {
3056                LOGE("Input settings missing!");
3057            }
3058
3059            notify_msg.type = CAMERA3_MSG_SHUTTER;
3060            notify_msg.message.shutter.frame_number = frame_number;
3061            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3062            mCallbackOps->notify(mCallbackOps, &notify_msg);
3063            i->shutter_notified = true;
3064            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3065                        i->frame_number, notify_msg.message.shutter.timestamp);
3066        }
3067
3068        if (i->input_buffer->release_fence != -1) {
3069           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3070           close(i->input_buffer->release_fence);
3071           if (rc != OK) {
3072               LOGE("input buffer sync wait failed %d", rc);
3073           }
3074        }
3075
3076        camera3_capture_result result;
3077        memset(&result, 0, sizeof(camera3_capture_result));
3078        result.frame_number = frame_number;
3079        result.result = i->settings;
3080        result.input_buffer = i->input_buffer;
3081        result.partial_result = PARTIAL_RESULT_COUNT;
3082
3083        mCallbackOps->process_capture_result(mCallbackOps, &result);
3084        LOGD("Input request metadata and input buffer frame_number = %u",
3085                        i->frame_number);
3086        i = erasePendingRequest(i);
3087    } else {
3088        LOGE("Could not find input request for frame number %d", frame_number);
3089    }
3090}
3091
3092/*===========================================================================
3093 * FUNCTION   : handleBufferWithLock
3094 *
3095 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3096 *
3097 * PARAMETERS : @buffer: image buffer for the callback
3098 *              @frame_number: frame number of the image buffer
3099 *
3100 * RETURN     :
3101 *
3102 *==========================================================================*/
3103void QCamera3HardwareInterface::handleBufferWithLock(
3104    camera3_stream_buffer_t *buffer, uint32_t frame_number)
3105{
3106    ATRACE_CALL();
3107    /* Nothing to be done during error state */
3108    if ((ERROR == mState) || (DEINIT == mState)) {
3109        return;
3110    }
3111    if (mFlushPerf) {
3112        handleBuffersDuringFlushLock(buffer);
3113        return;
3114    }
3115    //not in flush
3116    // If the frame number doesn't exist in the pending request list,
3117    // directly send the buffer to the frameworks, and update pending buffers map
3118    // Otherwise, book-keep the buffer.
3119    pendingRequestIterator i = mPendingRequestsList.begin();
3120    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3121        i++;
3122    }
3123    if (i == mPendingRequestsList.end()) {
3124        // Verify all pending requests frame_numbers are greater
3125        for (pendingRequestIterator j = mPendingRequestsList.begin();
3126                j != mPendingRequestsList.end(); j++) {
3127            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3128                LOGW("Error: pending live frame number %d is smaller than %d",
3129                         j->frame_number, frame_number);
3130            }
3131        }
3132        camera3_capture_result_t result;
3133        memset(&result, 0, sizeof(camera3_capture_result_t));
3134        result.result = NULL;
3135        result.frame_number = frame_number;
3136        result.num_output_buffers = 1;
3137        result.partial_result = 0;
3138        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3139                m != mPendingFrameDropList.end(); m++) {
3140            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3141            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3142            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3143                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3144                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3145                         frame_number, streamID);
3146                m = mPendingFrameDropList.erase(m);
3147                break;
3148            }
3149        }
3150        result.output_buffers = buffer;
3151        LOGH("result frame_number = %d, buffer = %p",
3152                 frame_number, buffer->buffer);
3153
3154        mPendingBuffersMap.removeBuf(buffer->buffer);
3155
3156        mCallbackOps->process_capture_result(mCallbackOps, &result);
3157    } else {
3158        if (i->input_buffer) {
3159            CameraMetadata settings;
3160            camera3_notify_msg_t notify_msg;
3161            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3162            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3163            if(i->settings) {
3164                settings = i->settings;
3165                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3166                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3167                } else {
3168                    LOGW("No timestamp in input settings! Using current one.");
3169                }
3170            } else {
3171                LOGE("Input settings missing!");
3172            }
3173
3174            notify_msg.type = CAMERA3_MSG_SHUTTER;
3175            notify_msg.message.shutter.frame_number = frame_number;
3176            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3177
3178            if (i->input_buffer->release_fence != -1) {
3179               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3180               close(i->input_buffer->release_fence);
3181               if (rc != OK) {
3182                   LOGE("input buffer sync wait failed %d", rc);
3183               }
3184            }
3185            mPendingBuffersMap.removeBuf(buffer->buffer);
3186
3187            bool notifyNow = true;
3188            for (pendingRequestIterator j = mPendingRequestsList.begin();
3189                    j != mPendingRequestsList.end(); j++) {
3190                if (j->frame_number < frame_number) {
3191                    notifyNow = false;
3192                    break;
3193                }
3194            }
3195
3196            if (notifyNow) {
3197                camera3_capture_result result;
3198                memset(&result, 0, sizeof(camera3_capture_result));
3199                result.frame_number = frame_number;
3200                result.result = i->settings;
3201                result.input_buffer = i->input_buffer;
3202                result.num_output_buffers = 1;
3203                result.output_buffers = buffer;
3204                result.partial_result = PARTIAL_RESULT_COUNT;
3205
3206                mCallbackOps->notify(mCallbackOps, &notify_msg);
3207                mCallbackOps->process_capture_result(mCallbackOps, &result);
3208                LOGD("Notify reprocess now %d!", frame_number);
3209                i = erasePendingRequest(i);
3210            } else {
3211                // Cache reprocess result for later
3212                PendingReprocessResult pendingResult;
3213                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3214                pendingResult.notify_msg = notify_msg;
3215                pendingResult.buffer = *buffer;
3216                pendingResult.frame_number = frame_number;
3217                mPendingReprocessResultList.push_back(pendingResult);
3218                LOGD("Cache reprocess result %d!", frame_number);
3219            }
3220        } else {
3221            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3222                j != i->buffers.end(); j++) {
3223                if (j->stream == buffer->stream) {
3224                    if (j->buffer != NULL) {
3225                        LOGE("Error: buffer is already set");
3226                    } else {
3227                        j->buffer = (camera3_stream_buffer_t *)malloc(
3228                            sizeof(camera3_stream_buffer_t));
3229                        *(j->buffer) = *buffer;
3230                        LOGH("cache buffer %p at result frame_number %u",
3231                             buffer->buffer, frame_number);
3232                    }
3233                }
3234            }
3235        }
3236    }
3237}
3238
3239/*===========================================================================
3240 * FUNCTION   : unblockRequestIfNecessary
3241 *
3242 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3243 *              that mMutex is held when this function is called.
3244 *
3245 * PARAMETERS :
3246 *
3247 * RETURN     :
3248 *
3249 *==========================================================================*/
3250void QCamera3HardwareInterface::unblockRequestIfNecessary()
3251{
3252   // Unblock process_capture_request
3253   pthread_cond_signal(&mRequestCond);
3254}
3255
3256
3257/*===========================================================================
3258 * FUNCTION   : processCaptureRequest
3259 *
3260 * DESCRIPTION: process a capture request from camera service
3261 *
3262 * PARAMETERS :
3263 *   @request : request from framework to process
3264 *
3265 * RETURN     :
3266 *
3267 *==========================================================================*/
3268int QCamera3HardwareInterface::processCaptureRequest(
3269                    camera3_capture_request_t *request)
3270{
3271    ATRACE_CALL();
3272    int rc = NO_ERROR;
3273    int32_t request_id;
3274    CameraMetadata meta;
3275    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3276    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3277    bool isVidBufRequested = false;
3278    camera3_stream_buffer_t *pInputBuffer = NULL;
3279
3280    pthread_mutex_lock(&mMutex);
3281
3282    // Validate current state
3283    switch (mState) {
3284        case CONFIGURED:
3285        case STARTED:
3286            /* valid state */
3287            break;
3288
3289        case ERROR:
3290            pthread_mutex_unlock(&mMutex);
3291            handleCameraDeviceError();
3292            return -ENODEV;
3293
3294        default:
3295            LOGE("Invalid state %d", mState);
3296            pthread_mutex_unlock(&mMutex);
3297            return -ENODEV;
3298    }
3299
3300    rc = validateCaptureRequest(request);
3301    if (rc != NO_ERROR) {
3302        LOGE("incoming request is not valid");
3303        pthread_mutex_unlock(&mMutex);
3304        return rc;
3305    }
3306
3307    meta = request->settings;
3308
3309    // For first capture request, send capture intent, and
3310    // stream on all streams
3311    if (mState == CONFIGURED) {
3312        // send an unconfigure to the backend so that the isp
3313        // resources are deallocated
3314        if (!mFirstConfiguration) {
3315            cam_stream_size_info_t stream_config_info;
3316            int32_t hal_version = CAM_HAL_V3;
3317            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3318            stream_config_info.buffer_info.min_buffers =
3319                    MIN_INFLIGHT_REQUESTS;
3320            stream_config_info.buffer_info.max_buffers =
3321                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3322            clear_metadata_buffer(mParameters);
3323            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3324                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3325            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3326                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3327            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3328                    mParameters);
3329            if (rc < 0) {
3330                LOGE("set_parms for unconfigure failed");
3331                pthread_mutex_unlock(&mMutex);
3332                return rc;
3333            }
3334        }
3335        m_perfLock.lock_acq();
3336        /* get eis information for stream configuration */
3337        cam_is_type_t is_type;
3338        char is_type_value[PROPERTY_VALUE_MAX];
3339        property_get("persist.camera.is_type", is_type_value, "0");
3340        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3341
3342        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3343            int32_t hal_version = CAM_HAL_V3;
3344            uint8_t captureIntent =
3345                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3346            mCaptureIntent = captureIntent;
3347            clear_metadata_buffer(mParameters);
3348            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3349            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3350        }
3351
3352        //If EIS is enabled, turn it on for video
3353        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3354        int32_t vsMode;
3355        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3356        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3357            rc = BAD_VALUE;
3358        }
3359
3360        //IS type will be 0 unless EIS is supported. If EIS is supported
3361        //it could either be 1 or 4 depending on the stream and video size
3362        if (setEis) {
3363            if (!m_bEisSupportedSize) {
3364                is_type = IS_TYPE_DIS;
3365            } else {
3366                is_type = IS_TYPE_EIS_2_0;
3367            }
3368            mStreamConfigInfo.is_type = is_type;
3369        } else {
3370            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3371        }
3372
3373        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3374                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3375        int32_t tintless_value = 1;
3376        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3377                CAM_INTF_PARM_TINTLESS, tintless_value);
3378        //Disable CDS for HFR mode or if DIS/EIS is on.
3379        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3380        //after every configure_stream
3381        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3382                (m_bIsVideo)) {
3383            int32_t cds = CAM_CDS_MODE_OFF;
3384            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3385                    CAM_INTF_PARM_CDS_MODE, cds))
3386                LOGE("Failed to disable CDS for HFR mode");
3387
3388        }
3389        setMobicat();
3390
3391        /* Set fps and hfr mode while sending meta stream info so that sensor
3392         * can configure appropriate streaming mode */
3393        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3394        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3395            rc = setHalFpsRange(meta, mParameters);
3396            if (rc != NO_ERROR) {
3397                LOGE("setHalFpsRange failed");
3398            }
3399        }
3400        if (meta.exists(ANDROID_CONTROL_MODE)) {
3401            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3402            rc = extractSceneMode(meta, metaMode, mParameters);
3403            if (rc != NO_ERROR) {
3404                LOGE("extractSceneMode failed");
3405            }
3406        }
3407
3408        //TODO: validate the arguments, HSV scenemode should have only the
3409        //advertised fps ranges
3410
3411        /*set the capture intent, hal version, tintless, stream info,
3412         *and disenable parameters to the backend*/
3413        LOGD("set_parms META_STREAM_INFO " );
3414        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3415            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3416                    "Format:%d",
3417                    mStreamConfigInfo.type[i],
3418                    mStreamConfigInfo.stream_sizes[i].width,
3419                    mStreamConfigInfo.stream_sizes[i].height,
3420                    mStreamConfigInfo.postprocess_mask[i],
3421                    mStreamConfigInfo.format[i]);
3422        }
3423        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3424                    mParameters);
3425        if (rc < 0) {
3426            LOGE("set_parms failed for hal version, stream info");
3427        }
3428
3429        cam_dimension_t sensor_dim;
3430        memset(&sensor_dim, 0, sizeof(sensor_dim));
3431        rc = getSensorOutputSize(sensor_dim);
3432        if (rc != NO_ERROR) {
3433            LOGE("Failed to get sensor output size");
3434            pthread_mutex_unlock(&mMutex);
3435            goto error_exit;
3436        }
3437
3438        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3439                gCamCapability[mCameraId]->active_array_size.height,
3440                sensor_dim.width, sensor_dim.height);
3441
3442        /* Set batchmode before initializing channel. Since registerBuffer
3443         * internally initializes some of the channels, better set batchmode
3444         * even before first register buffer */
3445        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3446            it != mStreamInfo.end(); it++) {
3447            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3448            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3449                    && mBatchSize) {
3450                rc = channel->setBatchSize(mBatchSize);
3451                //Disable per frame map unmap for HFR/batchmode case
3452                rc |= channel->setPerFrameMapUnmap(false);
3453                if (NO_ERROR != rc) {
3454                    LOGE("Channel init failed %d", rc);
3455                    pthread_mutex_unlock(&mMutex);
3456                    goto error_exit;
3457                }
3458            }
3459        }
3460
3461        //First initialize all streams
3462        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3463            it != mStreamInfo.end(); it++) {
3464            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3465            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3466               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3467               setEis)
3468                rc = channel->initialize(is_type);
3469            else {
3470                rc = channel->initialize(IS_TYPE_NONE);
3471            }
3472            if (NO_ERROR != rc) {
3473                LOGE("Channel initialization failed %d", rc);
3474                pthread_mutex_unlock(&mMutex);
3475                goto error_exit;
3476            }
3477        }
3478
3479        if (mRawDumpChannel) {
3480            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3481            if (rc != NO_ERROR) {
3482                LOGE("Error: Raw Dump Channel init failed");
3483                pthread_mutex_unlock(&mMutex);
3484                goto error_exit;
3485            }
3486        }
3487        if (mSupportChannel) {
3488            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3489            if (rc < 0) {
3490                LOGE("Support channel initialization failed");
3491                pthread_mutex_unlock(&mMutex);
3492                goto error_exit;
3493            }
3494        }
3495        if (mAnalysisChannel) {
3496            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3497            if (rc < 0) {
3498                LOGE("Analysis channel initialization failed");
3499                pthread_mutex_unlock(&mMutex);
3500                goto error_exit;
3501            }
3502        }
3503        if (mDummyBatchChannel) {
3504            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3505            if (rc < 0) {
3506                LOGE("mDummyBatchChannel setBatchSize failed");
3507                pthread_mutex_unlock(&mMutex);
3508                goto error_exit;
3509            }
3510            rc = mDummyBatchChannel->initialize(is_type);
3511            if (rc < 0) {
3512                LOGE("mDummyBatchChannel initialization failed");
3513                pthread_mutex_unlock(&mMutex);
3514                goto error_exit;
3515            }
3516        }
3517
3518        // Set bundle info
3519        rc = setBundleInfo();
3520        if (rc < 0) {
3521            LOGE("setBundleInfo failed %d", rc);
3522            pthread_mutex_unlock(&mMutex);
3523            goto error_exit;
3524        }
3525
3526        //Then start them.
3527        LOGH("Start META Channel");
3528        rc = mMetadataChannel->start();
3529        if (rc < 0) {
3530            LOGE("META channel start failed");
3531            pthread_mutex_unlock(&mMutex);
3532            goto error_exit;
3533        }
3534
3535        if (mAnalysisChannel) {
3536            rc = mAnalysisChannel->start();
3537            if (rc < 0) {
3538                LOGE("Analysis channel start failed");
3539                mMetadataChannel->stop();
3540                pthread_mutex_unlock(&mMutex);
3541                goto error_exit;
3542            }
3543        }
3544
3545        if (mSupportChannel) {
3546            rc = mSupportChannel->start();
3547            if (rc < 0) {
3548                LOGE("Support channel start failed");
3549                mMetadataChannel->stop();
3550                /* Although support and analysis are mutually exclusive today
3551                   adding it in anycase for future proofing */
3552                if (mAnalysisChannel) {
3553                    mAnalysisChannel->stop();
3554                }
3555                pthread_mutex_unlock(&mMutex);
3556                goto error_exit;
3557            }
3558        }
3559        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3560            it != mStreamInfo.end(); it++) {
3561            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3562            LOGH("Start Processing Channel mask=%d",
3563                     channel->getStreamTypeMask());
3564            rc = channel->start();
3565            if (rc < 0) {
3566                LOGE("channel start failed");
3567                pthread_mutex_unlock(&mMutex);
3568                goto error_exit;
3569            }
3570        }
3571
3572        if (mRawDumpChannel) {
3573            LOGD("Starting raw dump stream");
3574            rc = mRawDumpChannel->start();
3575            if (rc != NO_ERROR) {
3576                LOGE("Error Starting Raw Dump Channel");
3577                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3578                      it != mStreamInfo.end(); it++) {
3579                    QCamera3Channel *channel =
3580                        (QCamera3Channel *)(*it)->stream->priv;
3581                    LOGH("Stopping Processing Channel mask=%d",
3582                        channel->getStreamTypeMask());
3583                    channel->stop();
3584                }
3585                if (mSupportChannel)
3586                    mSupportChannel->stop();
3587                if (mAnalysisChannel) {
3588                    mAnalysisChannel->stop();
3589                }
3590                mMetadataChannel->stop();
3591                pthread_mutex_unlock(&mMutex);
3592                goto error_exit;
3593            }
3594        }
3595
3596        if (mChannelHandle) {
3597
3598            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3599                    mChannelHandle);
3600            if (rc != NO_ERROR) {
3601                LOGE("start_channel failed %d", rc);
3602                pthread_mutex_unlock(&mMutex);
3603                goto error_exit;
3604            }
3605        }
3606
3607
3608        goto no_error;
3609error_exit:
3610        m_perfLock.lock_rel();
3611        return rc;
3612no_error:
3613        m_perfLock.lock_rel();
3614
3615        mWokenUpByDaemon = false;
3616        mPendingLiveRequest = 0;
3617        mFirstConfiguration = false;
3618        enablePowerHint();
3619    }
3620
3621    uint32_t frameNumber = request->frame_number;
3622    cam_stream_ID_t streamID;
3623
3624    if (mFlushPerf) {
3625        //we cannot accept any requests during flush
3626        LOGE("process_capture_request cannot proceed during flush");
3627        pthread_mutex_unlock(&mMutex);
3628        return NO_ERROR; //should return an error
3629    }
3630
3631    if (meta.exists(ANDROID_REQUEST_ID)) {
3632        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3633        mCurrentRequestId = request_id;
3634        LOGD("Received request with id: %d", request_id);
3635    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3636        LOGE("Unable to find request id field, \
3637                & no previous id available");
3638        pthread_mutex_unlock(&mMutex);
3639        return NAME_NOT_FOUND;
3640    } else {
3641        LOGD("Re-using old request id");
3642        request_id = mCurrentRequestId;
3643    }
3644
3645    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3646                                    request->num_output_buffers,
3647                                    request->input_buffer,
3648                                    frameNumber);
3649    // Acquire all request buffers first
3650    streamID.num_streams = 0;
3651    int blob_request = 0;
3652    uint32_t snapshotStreamId = 0;
3653    for (size_t i = 0; i < request->num_output_buffers; i++) {
3654        const camera3_stream_buffer_t& output = request->output_buffers[i];
3655        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3656
3657        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3658            //Call function to store local copy of jpeg data for encode params.
3659            blob_request = 1;
3660            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3661        }
3662
3663        if (output.acquire_fence != -1) {
3664           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3665           close(output.acquire_fence);
3666           if (rc != OK) {
3667              LOGE("sync wait failed %d", rc);
3668              pthread_mutex_unlock(&mMutex);
3669              return rc;
3670           }
3671        }
3672
3673        streamID.streamID[streamID.num_streams] =
3674            channel->getStreamID(channel->getStreamTypeMask());
3675        streamID.num_streams++;
3676
3677        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3678            isVidBufRequested = true;
3679        }
3680    }
3681
3682    if (blob_request) {
3683        KPI_ATRACE_INT("SNAPSHOT", 1);
3684    }
3685    if (blob_request && mRawDumpChannel) {
3686        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3687        streamID.streamID[streamID.num_streams] =
3688            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3689        streamID.num_streams++;
3690    }
3691
3692    if(request->input_buffer == NULL) {
3693        /* Parse the settings:
3694         * - For every request in NORMAL MODE
3695         * - For every request in HFR mode during preview only case
3696         * - For first request of every batch in HFR mode during video
3697         * recording. In batchmode the same settings except frame number is
3698         * repeated in each request of the batch.
3699         */
3700        if (!mBatchSize ||
3701           (mBatchSize && !isVidBufRequested) ||
3702           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3703            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3704            if (rc < 0) {
3705                LOGE("fail to set frame parameters");
3706                pthread_mutex_unlock(&mMutex);
3707                return rc;
3708            }
3709        }
3710        /* For batchMode HFR, setFrameParameters is not called for every
3711         * request. But only frame number of the latest request is parsed.
3712         * Keep track of first and last frame numbers in a batch so that
3713         * metadata for the frame numbers of batch can be duplicated in
3714         * handleBatchMetadta */
3715        if (mBatchSize) {
3716            if (!mToBeQueuedVidBufs) {
3717                //start of the batch
3718                mFirstFrameNumberInBatch = request->frame_number;
3719            }
3720            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3721                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3722                LOGE("Failed to set the frame number in the parameters");
3723                return BAD_VALUE;
3724            }
3725        }
3726        if (mNeedSensorRestart) {
3727            /* Unlock the mutex as restartSensor waits on the channels to be
3728             * stopped, which in turn calls stream callback functions -
3729             * handleBufferWithLock and handleMetadataWithLock */
3730            pthread_mutex_unlock(&mMutex);
3731            rc = dynamicUpdateMetaStreamInfo();
3732            if (rc != NO_ERROR) {
3733                LOGE("Restarting the sensor failed");
3734                return BAD_VALUE;
3735            }
3736            mNeedSensorRestart = false;
3737            pthread_mutex_lock(&mMutex);
3738        }
3739    } else {
3740
3741        if (request->input_buffer->acquire_fence != -1) {
3742           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3743           close(request->input_buffer->acquire_fence);
3744           if (rc != OK) {
3745              LOGE("input buffer sync wait failed %d", rc);
3746              pthread_mutex_unlock(&mMutex);
3747              return rc;
3748           }
3749        }
3750    }
3751
3752    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3753        mLastCustIntentFrmNum = frameNumber;
3754    }
3755    /* Update pending request list and pending buffers map */
3756    PendingRequestInfo pendingRequest;
3757    pendingRequestIterator latestRequest;
3758    pendingRequest.frame_number = frameNumber;
3759    pendingRequest.num_buffers = request->num_output_buffers;
3760    pendingRequest.request_id = request_id;
3761    pendingRequest.blob_request = blob_request;
3762    pendingRequest.timestamp = 0;
3763    pendingRequest.bUrgentReceived = 0;
3764    if (request->input_buffer) {
3765        pendingRequest.input_buffer =
3766                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3767        *(pendingRequest.input_buffer) = *(request->input_buffer);
3768        pInputBuffer = pendingRequest.input_buffer;
3769    } else {
3770       pendingRequest.input_buffer = NULL;
3771       pInputBuffer = NULL;
3772    }
3773
3774    pendingRequest.pipeline_depth = 0;
3775    pendingRequest.partial_result_cnt = 0;
3776    extractJpegMetadata(mCurJpegMeta, request);
3777    pendingRequest.jpegMetadata = mCurJpegMeta;
3778    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3779    pendingRequest.shutter_notified = false;
3780
3781    //extract capture intent
3782    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3783        mCaptureIntent =
3784                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3785    }
3786    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3787        mHybridAeEnable =
3788                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3789    }
3790    pendingRequest.capture_intent = mCaptureIntent;
3791    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3792
3793    //extract CAC info
3794    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
3795        mCacMode =
3796                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
3797    }
3798    pendingRequest.fwkCacMode = mCacMode;
3799
3800    PendingBuffersInRequest bufsForCurRequest;
3801    bufsForCurRequest.frame_number = frameNumber;
3802    // Mark current timestamp for the new request
3803    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
3804
3805    for (size_t i = 0; i < request->num_output_buffers; i++) {
3806        RequestedBufferInfo requestedBuf;
3807        memset(&requestedBuf, 0, sizeof(requestedBuf));
3808        requestedBuf.stream = request->output_buffers[i].stream;
3809        requestedBuf.buffer = NULL;
3810        pendingRequest.buffers.push_back(requestedBuf);
3811
3812        // Add to buffer handle the pending buffers list
3813        PendingBufferInfo bufferInfo;
3814        bufferInfo.buffer = request->output_buffers[i].buffer;
3815        bufferInfo.stream = request->output_buffers[i].stream;
3816        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
3817        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3818        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3819            frameNumber, bufferInfo.buffer,
3820            channel->getStreamTypeMask(), bufferInfo.stream->format);
3821    }
3822    // Add this request packet into mPendingBuffersMap
3823    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
3824    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
3825        mPendingBuffersMap.get_num_overall_buffers());
3826
3827    latestRequest = mPendingRequestsList.insert(
3828            mPendingRequestsList.end(), pendingRequest);
3829    if(mFlush) {
3830        pthread_mutex_unlock(&mMutex);
3831        return NO_ERROR;
3832    }
3833
3834    // Notify metadata channel we receive a request
3835    mMetadataChannel->request(NULL, frameNumber);
3836
3837    if(request->input_buffer != NULL){
3838        LOGD("Input request, frame_number %d", frameNumber);
3839        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3840        if (NO_ERROR != rc) {
3841            LOGE("fail to set reproc parameters");
3842            pthread_mutex_unlock(&mMutex);
3843            return rc;
3844        }
3845    }
3846
3847    // Call request on other streams
3848    uint32_t streams_need_metadata = 0;
3849    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3850    for (size_t i = 0; i < request->num_output_buffers; i++) {
3851        const camera3_stream_buffer_t& output = request->output_buffers[i];
3852        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3853
3854        if (channel == NULL) {
3855            LOGW("invalid channel pointer for stream");
3856            continue;
3857        }
3858
3859        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3860            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
3861                      output.buffer, request->input_buffer, frameNumber);
3862            if(request->input_buffer != NULL){
3863                rc = channel->request(output.buffer, frameNumber,
3864                        pInputBuffer, &mReprocMeta);
3865                if (rc < 0) {
3866                    LOGE("Fail to request on picture channel");
3867                    pthread_mutex_unlock(&mMutex);
3868                    return rc;
3869                }
3870            } else {
3871                LOGD("snapshot request with buffer %p, frame_number %d",
3872                         output.buffer, frameNumber);
3873                if (!request->settings) {
3874                    rc = channel->request(output.buffer, frameNumber,
3875                            NULL, mPrevParameters);
3876                } else {
3877                    rc = channel->request(output.buffer, frameNumber,
3878                            NULL, mParameters);
3879                }
3880                if (rc < 0) {
3881                    LOGE("Fail to request on picture channel");
3882                    pthread_mutex_unlock(&mMutex);
3883                    return rc;
3884                }
3885                pendingBufferIter->need_metadata = true;
3886                streams_need_metadata++;
3887            }
3888        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3889            bool needMetadata = false;
3890
3891            if (m_perfLock.isPerfLockTimedAcquired()) {
3892                if (m_perfLock.isTimerReset())
3893                {
3894                    m_perfLock.lock_rel_timed();
3895                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3896                }
3897            } else {
3898                m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3899            }
3900
3901            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3902            rc = yuvChannel->request(output.buffer, frameNumber,
3903                    pInputBuffer,
3904                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3905            if (rc < 0) {
3906                LOGE("Fail to request on YUV channel");
3907                pthread_mutex_unlock(&mMutex);
3908                return rc;
3909            }
3910            pendingBufferIter->need_metadata = needMetadata;
3911            if (needMetadata)
3912                streams_need_metadata += 1;
3913            LOGD("calling YUV channel request, need_metadata is %d",
3914                     needMetadata);
3915        } else {
3916            LOGD("request with buffer %p, frame_number %d",
3917                  output.buffer, frameNumber);
3918            /* Set perf lock for API-2 zsl */
3919            if (IS_USAGE_ZSL(output.stream->usage)) {
3920                if (m_perfLock.isPerfLockTimedAcquired()) {
3921                    if (m_perfLock.isTimerReset())
3922                    {
3923                        m_perfLock.lock_rel_timed();
3924                        m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3925                    }
3926                } else {
3927                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3928                }
3929            }
3930
3931            rc = channel->request(output.buffer, frameNumber);
3932            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3933                    && mBatchSize) {
3934                mToBeQueuedVidBufs++;
3935                if (mToBeQueuedVidBufs == mBatchSize) {
3936                    channel->queueBatchBuf();
3937                }
3938            }
3939            if (rc < 0) {
3940                LOGE("request failed");
3941                pthread_mutex_unlock(&mMutex);
3942                return rc;
3943            }
3944        }
3945        pendingBufferIter++;
3946    }
3947
3948    //If 2 streams have need_metadata set to true, fail the request, unless
3949    //we copy/reference count the metadata buffer
3950    if (streams_need_metadata > 1) {
3951        LOGE("not supporting request in which two streams requires"
3952                " 2 HAL metadata for reprocessing");
3953        pthread_mutex_unlock(&mMutex);
3954        return -EINVAL;
3955    }
3956
3957    if(request->input_buffer == NULL) {
3958        /* Set the parameters to backend:
3959         * - For every request in NORMAL MODE
3960         * - For every request in HFR mode during preview only case
3961         * - Once every batch in HFR mode during video recording
3962         */
3963        if (!mBatchSize ||
3964           (mBatchSize && !isVidBufRequested) ||
3965           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3966            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3967                     mBatchSize, isVidBufRequested,
3968                    mToBeQueuedVidBufs);
3969            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3970                    mParameters);
3971            if (rc < 0) {
3972                LOGE("set_parms failed");
3973            }
3974            /* reset to zero coz, the batch is queued */
3975            mToBeQueuedVidBufs = 0;
3976            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3977        }
3978        mPendingLiveRequest++;
3979    }
3980
3981    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3982
3983    mState = STARTED;
3984    // Added a timed condition wait
3985    struct timespec ts;
3986    uint8_t isValidTimeout = 1;
3987    rc = clock_gettime(CLOCK_REALTIME, &ts);
3988    if (rc < 0) {
3989      isValidTimeout = 0;
3990      LOGE("Error reading the real time clock!!");
3991    }
3992    else {
3993      // Make timeout as 5 sec for request to be honored
3994      ts.tv_sec += 5;
3995    }
3996    //Block on conditional variable
3997    if (mBatchSize) {
3998        /* For HFR, more buffers are dequeued upfront to improve the performance */
3999        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4000        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4001    }
4002    if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
4003        m_perfLock.lock_rel_timed();
4004
4005    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
4006            (mState != ERROR) && (mState != DEINIT)) {
4007        if (!isValidTimeout) {
4008            LOGD("Blocking on conditional wait");
4009            pthread_cond_wait(&mRequestCond, &mMutex);
4010        }
4011        else {
4012            LOGD("Blocking on timed conditional wait");
4013            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4014            if (rc == ETIMEDOUT) {
4015                rc = -ENODEV;
4016                LOGE("Unblocked on timeout!!!!");
4017                break;
4018            }
4019        }
4020        LOGD("Unblocked");
4021        if (mWokenUpByDaemon) {
4022            mWokenUpByDaemon = false;
4023            if (mPendingLiveRequest < maxInFlightRequests)
4024                break;
4025        }
4026    }
4027    pthread_mutex_unlock(&mMutex);
4028
4029    return rc;
4030}
4031
4032/*===========================================================================
4033 * FUNCTION   : dump
4034 *
 * DESCRIPTION: Dump pending request, pending buffer and pending frame drop
 *              state to the given file descriptor; also flags a debug level
 *              update (dumpsys media.camera is used as the trigger).
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump output to
 *
 * RETURN     : None
4041 *==========================================================================*/
4042void QCamera3HardwareInterface::dump(int fd)
4043{
4044    pthread_mutex_lock(&mMutex);
4045    dprintf(fd, "\n Camera HAL3 information Begin \n");
4046
4047    dprintf(fd, "\nNumber of pending requests: %zu \n",
4048        mPendingRequestsList.size());
4049    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4050    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4051    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4052    for(pendingRequestIterator i = mPendingRequestsList.begin();
4053            i != mPendingRequestsList.end(); i++) {
4054        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4055        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4056        i->input_buffer);
4057    }
4058    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4059                mPendingBuffersMap.get_num_overall_buffers());
4060    dprintf(fd, "-------+------------------\n");
4061    dprintf(fd, " Frame | Stream type mask \n");
4062    dprintf(fd, "-------+------------------\n");
4063    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4064        for(auto &j : req.mPendingBufferList) {
4065            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4066            dprintf(fd, " %5d | %11d \n",
4067                    req.frame_number, channel->getStreamTypeMask());
4068        }
4069    }
4070    dprintf(fd, "-------+------------------\n");
4071
4072    dprintf(fd, "\nPending frame drop list: %zu\n",
4073        mPendingFrameDropList.size());
4074    dprintf(fd, "-------+-----------\n");
4075    dprintf(fd, " Frame | Stream ID \n");
4076    dprintf(fd, "-------+-----------\n");
4077    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4078        i != mPendingFrameDropList.end(); i++) {
4079        dprintf(fd, " %5d | %9d \n",
4080            i->frame_number, i->stream_ID);
4081    }
4082    dprintf(fd, "-------+-----------\n");
4083
4084    dprintf(fd, "\n Camera HAL3 information End \n");
4085
4086    /* use dumpsys media.camera as trigger to send update debug level event */
4087    mUpdateDebugLevel = true;
4088    pthread_mutex_unlock(&mMutex);
4089    return;
4090}
4091
4092/*===========================================================================
4093 * FUNCTION   : flush
4094 *
4095 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4096 *              conditionally restarts channels
4097 *
4098 * PARAMETERS :
4099 *  @ restartChannels: re-start all channels
4100 *
4101 *
4102 * RETURN     :
4103 *          0 on success
4104 *          Error code on failure
4105 *==========================================================================*/
4106int QCamera3HardwareInterface::flush(bool restartChannels)
4107{
4108    KPI_ATRACE_CALL();
4109    int32_t rc = NO_ERROR;
4110
4111    LOGD("Unblocking Process Capture Request");
4112    pthread_mutex_lock(&mMutex);
4113    mFlush = true;
4114    pthread_mutex_unlock(&mMutex);
4115
4116    rc = stopAllChannels();
4117    if (rc < 0) {
4118        LOGE("stopAllChannels failed");
4119        return rc;
4120    }
4121    if (mChannelHandle) {
4122        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4123                mChannelHandle);
4124    }
4125
4126    // Reset bundle info
4127    rc = setBundleInfo();
4128    if (rc < 0) {
4129        LOGE("setBundleInfo failed %d", rc);
4130        return rc;
4131    }
4132
4133    // Mutex Lock
4134    pthread_mutex_lock(&mMutex);
4135
4136    // Unblock process_capture_request
4137    mPendingLiveRequest = 0;
4138    pthread_cond_signal(&mRequestCond);
4139
4140    rc = notifyErrorForPendingRequests();
4141    if (rc < 0) {
4142        LOGE("notifyErrorForPendingRequests failed");
4143        pthread_mutex_unlock(&mMutex);
4144        return rc;
4145    }
4146
4147    mFlush = false;
4148
4149    // Start the Streams/Channels
4150    if (restartChannels) {
4151        rc = startAllChannels();
4152        if (rc < 0) {
4153            LOGE("startAllChannels failed");
4154            pthread_mutex_unlock(&mMutex);
4155            return rc;
4156        }
4157    }
4158
4159    if (mChannelHandle) {
4160        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4161                    mChannelHandle);
4162        if (rc < 0) {
4163            LOGE("start_channel failed");
4164            pthread_mutex_unlock(&mMutex);
4165            return rc;
4166        }
4167    }
4168
4169    pthread_mutex_unlock(&mMutex);
4170
4171    return 0;
4172}
4173
4174/*===========================================================================
4175 * FUNCTION   : flushPerf
4176 *
4177 * DESCRIPTION: This is the performance optimization version of flush that does
4178 *              not use stream off, rather flushes the system
4179 *
 * PARAMETERS : None
 *
4182 *
4183 * RETURN     : 0 : success
4184 *              -EINVAL: input is malformed (device is not valid)
4185 *              -ENODEV: if the device has encountered a serious error
4186 *==========================================================================*/
4187int QCamera3HardwareInterface::flushPerf()
4188{
4189    ATRACE_CALL();
4190    int32_t rc = 0;
4191    struct timespec timeout;
4192    bool timed_wait = false;
4193
4194    pthread_mutex_lock(&mMutex);
4195    mFlushPerf = true;
4196    mPendingBuffersMap.numPendingBufsAtFlush =
4197        mPendingBuffersMap.get_num_overall_buffers();
4198    LOGD("Calling flush. Wait for %d buffers to return",
4199        mPendingBuffersMap.numPendingBufsAtFlush);
4200
4201    /* send the flush event to the backend */
4202    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4203    if (rc < 0) {
4204        LOGE("Error in flush: IOCTL failure");
4205        mFlushPerf = false;
4206        pthread_mutex_unlock(&mMutex);
4207        return -ENODEV;
4208    }
4209
4210    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
4211        LOGD("No pending buffers in HAL, return flush");
4212        mFlushPerf = false;
4213        pthread_mutex_unlock(&mMutex);
4214        return rc;
4215    }
4216
4217    /* wait on a signal that buffers were received */
4218    rc = clock_gettime(CLOCK_REALTIME, &timeout);
4219    if (rc < 0) {
4220        LOGE("Error reading the real time clock, cannot use timed wait");
4221    } else {
4222        timeout.tv_sec += FLUSH_TIMEOUT;
4223        timed_wait = true;
4224    }
4225
4226    //Block on conditional variable
4227    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
4228        LOGD("Waiting on mBuffersCond");
4229        if (!timed_wait) {
4230            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4231            if (rc != 0) {
4232                 LOGE("pthread_cond_wait failed due to rc = %s",
4233                        strerror(rc));
4234                 break;
4235            }
4236        } else {
4237            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4238            if (rc != 0) {
4239                LOGE("pthread_cond_timedwait failed due to rc = %s",
4240                            strerror(rc));
4241                break;
4242            }
4243        }
4244    }
4245    if (rc != 0) {
4246        mFlushPerf = false;
4247        pthread_mutex_unlock(&mMutex);
4248        return -ENODEV;
4249    }
4250
4251    LOGD("Received buffers, now safe to return them");
4252
4253    //make sure the channels handle flush
4254    //currently only required for the picture channel to release snapshot resources
4255    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4256            it != mStreamInfo.end(); it++) {
4257        QCamera3Channel *channel = (*it)->channel;
4258        if (channel) {
4259            rc = channel->flush();
4260            if (rc) {
4261               LOGE("Flushing the channels failed with error %d", rc);
4262               // even though the channel flush failed we need to continue and
4263               // return the buffers we have to the framework, however the return
4264               // value will be an error
4265               rc = -ENODEV;
4266            }
4267        }
4268    }
4269
4270    /* notify the frameworks and send errored results */
4271    rc = notifyErrorForPendingRequests();
4272    if (rc < 0) {
4273        LOGE("notifyErrorForPendingRequests failed");
4274        pthread_mutex_unlock(&mMutex);
4275        return rc;
4276    }
4277
4278    //unblock process_capture_request
4279    mPendingLiveRequest = 0;
4280    unblockRequestIfNecessary();
4281
4282    mFlushPerf = false;
4283    pthread_mutex_unlock(&mMutex);
4284    LOGD ("Flush Operation complete. rc = %d", rc);
4285    return rc;
4286}
4287
4288/*===========================================================================
4289 * FUNCTION   : handleCameraDeviceError
4290 *
4291 * DESCRIPTION: This function calls internal flush and notifies the error to
4292 *              framework and updates the state variable.
4293 *
4294 * PARAMETERS : None
4295 *
4296 * RETURN     : NO_ERROR on Success
4297 *              Error code on failure
4298 *==========================================================================*/
4299int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4300{
4301    int32_t rc = NO_ERROR;
4302
4303    pthread_mutex_lock(&mMutex);
4304    if (mState != ERROR) {
4305        //if mState != ERROR, nothing to be done
4306        pthread_mutex_unlock(&mMutex);
4307        return NO_ERROR;
4308    }
4309    pthread_mutex_unlock(&mMutex);
4310
4311    rc = flush(false /* restart channels */);
4312    if (NO_ERROR != rc) {
4313        LOGE("internal flush to handle mState = ERROR failed");
4314    }
4315
4316    pthread_mutex_lock(&mMutex);
4317    mState = DEINIT;
4318    pthread_mutex_unlock(&mMutex);
4319
4320    camera3_notify_msg_t notify_msg;
4321    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4322    notify_msg.type = CAMERA3_MSG_ERROR;
4323    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4324    notify_msg.message.error.error_stream = NULL;
4325    notify_msg.message.error.frame_number = 0;
4326    mCallbackOps->notify(mCallbackOps, &notify_msg);
4327
4328    return rc;
4329}
4330
4331/*===========================================================================
4332 * FUNCTION   : captureResultCb
4333 *
4334 * DESCRIPTION: Callback handler for all capture result
4335 *              (streams, as well as metadata)
4336 *
4337 * PARAMETERS :
 *   @metadata_buf : metadata buffer from the backend; NULL for buffer results
 *   @buffer       : actual gralloc buffer to be returned to frameworks.
 *                   NULL if metadata.
 *   @frame_number : frame number of the request this result belongs to
 *   @isInputBuffer: true if the callback is for an input (reprocess) buffer
4341 *
4342 * RETURN     : NONE
4343 *==========================================================================*/
4344void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4345                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4346{
4347    if (metadata_buf) {
4348        if (mBatchSize) {
4349            handleBatchMetadata(metadata_buf,
4350                    true /* free_and_bufdone_meta_buf */);
4351        } else { /* mBatchSize = 0 */
4352            hdrPlusPerfLock(metadata_buf);
4353            pthread_mutex_lock(&mMutex);
4354            handleMetadataWithLock(metadata_buf,
4355                    true /* free_and_bufdone_meta_buf */);
4356            pthread_mutex_unlock(&mMutex);
4357        }
4358    } else if (isInputBuffer) {
4359        pthread_mutex_lock(&mMutex);
4360        handleInputBufferWithLock(frame_number);
4361        pthread_mutex_unlock(&mMutex);
4362    } else {
4363        pthread_mutex_lock(&mMutex);
4364        handleBufferWithLock(buffer, frame_number);
4365        pthread_mutex_unlock(&mMutex);
4366    }
4367    return;
4368}
4369
4370/*===========================================================================
4371 * FUNCTION   : getReprocessibleOutputStreamId
4372 *
4373 * DESCRIPTION: Get source output stream id for the input reprocess stream
4374 *              based on size and format, which would be the largest
4375 *              output stream if an input stream exists.
4376 *
4377 * PARAMETERS :
4378 *   @id      : return the stream id if found
4379 *
4380 * RETURN     : int32_t type of status
4381 *              NO_ERROR  -- success
4382 *              none-zero failure code
4383 *==========================================================================*/
4384int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4385{
4386    /* check if any output or bidirectional stream with the same size and format
4387       and return that stream */
4388    if ((mInputStreamInfo.dim.width > 0) &&
4389            (mInputStreamInfo.dim.height > 0)) {
4390        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4391                it != mStreamInfo.end(); it++) {
4392
4393            camera3_stream_t *stream = (*it)->stream;
4394            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4395                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4396                    (stream->format == mInputStreamInfo.format)) {
4397                // Usage flag for an input stream and the source output stream
4398                // may be different.
4399                LOGD("Found reprocessible output stream! %p", *it);
4400                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4401                         stream->usage, mInputStreamInfo.usage);
4402
4403                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4404                if (channel != NULL && channel->mStreams[0]) {
4405                    id = channel->mStreams[0]->getMyServerID();
4406                    return NO_ERROR;
4407                }
4408            }
4409        }
4410    } else {
4411        LOGD("No input stream, so no reprocessible output stream");
4412    }
4413    return NAME_NOT_FOUND;
4414}
4415
4416/*===========================================================================
4417 * FUNCTION   : lookupFwkName
4418 *
4419 * DESCRIPTION: In case the enum is not same in fwk and backend
4420 *              make sure the parameter is correctly propogated
4421 *
4422 * PARAMETERS  :
4423 *   @arr      : map between the two enums
4424 *   @len      : len of the map
4425 *   @hal_name : name of the hal_parm to map
4426 *
 * RETURN     : int type of status
 *              fwk_name  -- success
 *              non-zero failure code
4430 *==========================================================================*/
4431template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4432        size_t len, halType hal_name)
4433{
4434
4435    for (size_t i = 0; i < len; i++) {
4436        if (arr[i].hal_name == hal_name) {
4437            return arr[i].fwk_name;
4438        }
4439    }
4440
4441    /* Not able to find matching framework type is not necessarily
4442     * an error case. This happens when mm-camera supports more attributes
4443     * than the frameworks do */
4444    LOGH("Cannot find matching framework type");
4445    return NAME_NOT_FOUND;
4446}
4447
4448/*===========================================================================
4449 * FUNCTION   : lookupHalName
4450 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
4453 *
4454 * PARAMETERS  :
4455 *   @arr      : map between the two enums
4456 *   @len      : len of the map
 *   @fwk_name : name of the framework parameter to map
 *
 * RETURN     : int32_t type of status
 *              hal_name  -- success
 *              non-zero failure code
4462 *==========================================================================*/
4463template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4464        size_t len, fwkType fwk_name)
4465{
4466    for (size_t i = 0; i < len; i++) {
4467        if (arr[i].fwk_name == fwk_name) {
4468            return arr[i].hal_name;
4469        }
4470    }
4471
4472    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4473    return NAME_NOT_FOUND;
4474}
4475
4476/*===========================================================================
4477 * FUNCTION   : lookupProp
4478 *
4479 * DESCRIPTION: lookup a value by its name
4480 *
4481 * PARAMETERS :
4482 *   @arr     : map between the two enums
4483 *   @len     : size of the map
4484 *   @name    : name to be looked up
4485 *
4486 * RETURN     : Value if found
4487 *              CAM_CDS_MODE_MAX if not found
4488 *==========================================================================*/
4489template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4490        size_t len, const char *name)
4491{
4492    if (name) {
4493        for (size_t i = 0; i < len; i++) {
4494            if (!strcmp(arr[i].desc, name)) {
4495                return arr[i].val;
4496            }
4497        }
4498    }
4499    return CAM_CDS_MODE_MAX;
4500}
4501
4502/*===========================================================================
4503 *
4504 * DESCRIPTION:
4505 *
4506 * PARAMETERS :
4507 *   @metadata : metadata information from callback
4508 *   @timestamp: metadata buffer timestamp
4509 *   @request_id: request id
4510 *   @jpegMetadata: additional jpeg metadata
4511 *   @hybrid_ae_enable: whether hybrid ae is enabled
4512 *   @pprocDone: whether internal offline postprocsesing is done
4513 *
4514 * RETURN     : camera_metadata_t*
4515 *              metadata in a format specified by fwk
4516 *==========================================================================*/
4517camera_metadata_t*
4518QCamera3HardwareInterface::translateFromHalMetadata(
4519                                 metadata_buffer_t *metadata,
4520                                 nsecs_t timestamp,
4521                                 int32_t request_id,
4522                                 const CameraMetadata& jpegMetadata,
4523                                 uint8_t pipeline_depth,
4524                                 uint8_t capture_intent,
4525                                 uint8_t hybrid_ae_enable,
4526                                 bool pprocDone,
4527                                 uint8_t fwk_cacMode)
4528{
4529    CameraMetadata camMetadata;
4530    camera_metadata_t *resultMetadata;
4531
4532    if (jpegMetadata.entryCount())
4533        camMetadata.append(jpegMetadata);
4534
4535    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4536    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4537    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4538    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4539    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4540
4541    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4542        int64_t fwk_frame_number = *frame_number;
4543        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4544    }
4545
4546    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4547        int32_t fps_range[2];
4548        fps_range[0] = (int32_t)float_range->min_fps;
4549        fps_range[1] = (int32_t)float_range->max_fps;
4550        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4551                                      fps_range, 2);
4552        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4553             fps_range[0], fps_range[1]);
4554    }
4555
4556    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4557        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4558    }
4559
4560    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4561        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4562                METADATA_MAP_SIZE(SCENE_MODES_MAP),
4563                *sceneMode);
4564        if (NAME_NOT_FOUND != val) {
4565            uint8_t fwkSceneMode = (uint8_t)val;
4566            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4567            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4568                     fwkSceneMode);
4569        }
4570    }
4571
4572    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4573        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4574        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4575    }
4576
4577    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4578        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4579        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4580    }
4581
4582    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4583        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4584        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4585    }
4586
4587    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4588            CAM_INTF_META_EDGE_MODE, metadata) {
4589        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4590    }
4591
4592    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4593        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4594        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4595    }
4596
4597    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4598        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4599    }
4600
4601    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4602        if (0 <= *flashState) {
4603            uint8_t fwk_flashState = (uint8_t) *flashState;
4604            if (!gCamCapability[mCameraId]->flash_available) {
4605                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4606            }
4607            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4608        }
4609    }
4610
4611    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4612        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4613        if (NAME_NOT_FOUND != val) {
4614            uint8_t fwk_flashMode = (uint8_t)val;
4615            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4616        }
4617    }
4618
4619    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4620        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4621        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4622    }
4623
4624    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4625        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4626    }
4627
4628    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4629        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4630    }
4631
4632    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4633        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4634    }
4635
4636    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4637        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4638        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4639    }
4640
4641    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4642        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4643        LOGD("fwk_videoStab = %d", fwk_videoStab);
4644        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4645    } else {
4646        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
4647        // and so hardcoding the Video Stab result to OFF mode.
4648        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4649        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
4650        LOGD("%s: EIS result default to OFF mode", __func__);
4651    }
4652
4653    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4654        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4655        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4656    }
4657
4658    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4659        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4660    }
4661
4662    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4663        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4664
4665        LOGD("dynamicblackLevel = %f %f %f %f",
4666          blackLevelSourcePattern->cam_black_level[0],
4667          blackLevelSourcePattern->cam_black_level[1],
4668          blackLevelSourcePattern->cam_black_level[2],
4669          blackLevelSourcePattern->cam_black_level[3]);
4670    }
4671
4672    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4673        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4674        float fwk_blackLevelInd[4];
4675
4676        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4677        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4678        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4679        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4680
4681        LOGD("applied dynamicblackLevel = %f %f %f %f",
4682          blackLevelAppliedPattern->cam_black_level[0],
4683          blackLevelAppliedPattern->cam_black_level[1],
4684          blackLevelAppliedPattern->cam_black_level[2],
4685          blackLevelAppliedPattern->cam_black_level[3]);
4686        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4687
4688        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
4689        // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4690        // depth space.
4691        fwk_blackLevelInd[0] /= 64.0;
4692        fwk_blackLevelInd[1] /= 64.0;
4693        fwk_blackLevelInd[2] /= 64.0;
4694        fwk_blackLevelInd[3] /= 64.0;
4695        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4696    }
4697
4698    // Fixed whitelevel is used by ISP/Sensor
4699    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4700            &gCamCapability[mCameraId]->white_level, 1);
4701
4702    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4703            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4704        int32_t scalerCropRegion[4];
4705        scalerCropRegion[0] = hScalerCropRegion->left;
4706        scalerCropRegion[1] = hScalerCropRegion->top;
4707        scalerCropRegion[2] = hScalerCropRegion->width;
4708        scalerCropRegion[3] = hScalerCropRegion->height;
4709
4710        // Adjust crop region from sensor output coordinate system to active
4711        // array coordinate system.
4712        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4713                scalerCropRegion[2], scalerCropRegion[3]);
4714
4715        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4716    }
4717
4718    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4719        LOGD("sensorExpTime = %lld", *sensorExpTime);
4720        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4721    }
4722
4723    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4724            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4725        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
4726        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4727    }
4728
4729    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4730            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4731        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
4732        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4733                sensorRollingShutterSkew, 1);
4734    }
4735
4736    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4737        LOGD("sensorSensitivity = %d", *sensorSensitivity);
4738        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4739
4740        //calculate the noise profile based on sensitivity
4741        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4742        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4743        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4744        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4745            noise_profile[i]   = noise_profile_S;
4746            noise_profile[i+1] = noise_profile_O;
4747        }
4748        LOGD("noise model entry (S, O) is (%f, %f)",
4749                noise_profile_S, noise_profile_O);
4750        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4751                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4752    }
4753
4754    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
4755        int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
4756        camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
4757    }
4758
4759    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4760        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4761        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4762    }
4763
4764    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4765        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4766                *faceDetectMode);
4767        if (NAME_NOT_FOUND != val) {
4768            uint8_t fwk_faceDetectMode = (uint8_t)val;
4769            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4770
4771            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4772                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4773                        CAM_INTF_META_FACE_DETECTION, metadata) {
4774                    uint8_t numFaces = MIN(
4775                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4776                    int32_t faceIds[MAX_ROI];
4777                    uint8_t faceScores[MAX_ROI];
4778                    int32_t faceRectangles[MAX_ROI * 4];
4779                    int32_t faceLandmarks[MAX_ROI * 6];
4780                    size_t j = 0, k = 0;
4781
4782                    for (size_t i = 0; i < numFaces; i++) {
4783                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4784                        // Adjust crop region from sensor output coordinate system to active
4785                        // array coordinate system.
4786                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4787                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4788                                rect.width, rect.height);
4789
4790                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4791                                faceRectangles+j, -1);
4792
4793                        j+= 4;
4794                    }
4795                    if (numFaces <= 0) {
4796                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4797                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4798                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4799                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4800                    }
4801
4802                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4803                            numFaces);
4804                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4805                            faceRectangles, numFaces * 4U);
4806                    if (fwk_faceDetectMode ==
4807                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4808                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
4809                                CAM_INTF_META_FACE_LANDMARK, metadata) {
4810
4811                            for (size_t i = 0; i < numFaces; i++) {
4812                                // Map the co-ordinate sensor output coordinate system to active
4813                                // array coordinate system.
4814                                mCropRegionMapper.toActiveArray(
4815                                        landmarks->face_landmarks[i].left_eye_center.x,
4816                                        landmarks->face_landmarks[i].left_eye_center.y);
4817                                mCropRegionMapper.toActiveArray(
4818                                        landmarks->face_landmarks[i].right_eye_center.x,
4819                                        landmarks->face_landmarks[i].right_eye_center.y);
4820                                mCropRegionMapper.toActiveArray(
4821                                        landmarks->face_landmarks[i].mouth_center.x,
4822                                        landmarks->face_landmarks[i].mouth_center.y);
4823
4824                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
4825                                k+= 6;
4826                            }
4827                        }
4828
4829                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4830                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4831                                faceLandmarks, numFaces * 6U);
4832                   }
4833                }
4834            }
4835        }
4836    }
4837
4838    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4839        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4840        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4841    }
4842
4843    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4844            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4845        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4846        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4847    }
4848
4849    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4850            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4851        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4852                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4853    }
4854
4855    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4856            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4857        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4858                CAM_MAX_SHADING_MAP_HEIGHT);
4859        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4860                CAM_MAX_SHADING_MAP_WIDTH);
4861        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4862                lensShadingMap->lens_shading, 4U * map_width * map_height);
4863    }
4864
4865    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4866        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4867        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4868    }
4869
4870    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4871        //Populate CAM_INTF_META_TONEMAP_CURVES
4872        /* ch0 = G, ch 1 = B, ch 2 = R*/
4873        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4874            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4875                     tonemap->tonemap_points_cnt,
4876                    CAM_MAX_TONEMAP_CURVE_SIZE);
4877            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4878        }
4879
4880        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4881                        &tonemap->curves[0].tonemap_points[0][0],
4882                        tonemap->tonemap_points_cnt * 2);
4883
4884        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4885                        &tonemap->curves[1].tonemap_points[0][0],
4886                        tonemap->tonemap_points_cnt * 2);
4887
4888        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4889                        &tonemap->curves[2].tonemap_points[0][0],
4890                        tonemap->tonemap_points_cnt * 2);
4891    }
4892
4893    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4894            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4895        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4896                CC_GAINS_COUNT);
4897    }
4898
4899    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4900            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4901        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4902                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4903                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4904    }
4905
4906    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4907            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4908        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4909            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4910                     toneCurve->tonemap_points_cnt,
4911                    CAM_MAX_TONEMAP_CURVE_SIZE);
4912            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4913        }
4914        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4915                (float*)toneCurve->curve.tonemap_points,
4916                toneCurve->tonemap_points_cnt * 2);
4917    }
4918
4919    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4920            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4921        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4922                predColorCorrectionGains->gains, 4);
4923    }
4924
4925    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4926            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4927        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4928                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4929                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4930    }
4931
4932    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4933        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4934    }
4935
4936    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4937        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4938        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4939    }
4940
4941    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4942        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4943        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4944    }
4945
4946    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4947        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4948                *effectMode);
4949        if (NAME_NOT_FOUND != val) {
4950            uint8_t fwk_effectMode = (uint8_t)val;
4951            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4952        }
4953    }
4954
4955    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4956            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4957        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4958                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4959        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4960            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4961        }
4962        int32_t fwk_testPatternData[4];
4963        fwk_testPatternData[0] = testPatternData->r;
4964        fwk_testPatternData[3] = testPatternData->b;
4965        switch (gCamCapability[mCameraId]->color_arrangement) {
4966        case CAM_FILTER_ARRANGEMENT_RGGB:
4967        case CAM_FILTER_ARRANGEMENT_GRBG:
4968            fwk_testPatternData[1] = testPatternData->gr;
4969            fwk_testPatternData[2] = testPatternData->gb;
4970            break;
4971        case CAM_FILTER_ARRANGEMENT_GBRG:
4972        case CAM_FILTER_ARRANGEMENT_BGGR:
4973            fwk_testPatternData[2] = testPatternData->gr;
4974            fwk_testPatternData[1] = testPatternData->gb;
4975            break;
4976        default:
4977            LOGE("color arrangement %d is not supported",
4978                gCamCapability[mCameraId]->color_arrangement);
4979            break;
4980        }
4981        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4982    }
4983
4984    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4985        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4986    }
4987
4988    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4989        String8 str((const char *)gps_methods);
4990        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4991    }
4992
4993    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4994        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4995    }
4996
4997    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4998        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4999    }
5000
5001    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5002        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5003        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5004    }
5005
5006    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5007        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5008        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5009    }
5010
5011    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5012        int32_t fwk_thumb_size[2];
5013        fwk_thumb_size[0] = thumb_size->width;
5014        fwk_thumb_size[1] = thumb_size->height;
5015        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5016    }
5017
5018    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5019        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5020                privateData,
5021                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5022    }
5023
5024    if (metadata->is_tuning_params_valid) {
5025        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5026        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5027        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5028
5029
5030        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5031                sizeof(uint32_t));
5032        data += sizeof(uint32_t);
5033
5034        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5035                sizeof(uint32_t));
5036        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5037        data += sizeof(uint32_t);
5038
5039        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5040                sizeof(uint32_t));
5041        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5042        data += sizeof(uint32_t);
5043
5044        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5045                sizeof(uint32_t));
5046        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5047        data += sizeof(uint32_t);
5048
5049        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5050                sizeof(uint32_t));
5051        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5052        data += sizeof(uint32_t);
5053
5054        metadata->tuning_params.tuning_mod3_data_size = 0;
5055        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5056                sizeof(uint32_t));
5057        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5058        data += sizeof(uint32_t);
5059
5060        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5061                TUNING_SENSOR_DATA_MAX);
5062        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5063                count);
5064        data += count;
5065
5066        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5067                TUNING_VFE_DATA_MAX);
5068        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5069                count);
5070        data += count;
5071
5072        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5073                TUNING_CPP_DATA_MAX);
5074        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5075                count);
5076        data += count;
5077
5078        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5079                TUNING_CAC_DATA_MAX);
5080        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5081                count);
5082        data += count;
5083
5084        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5085                (int32_t *)(void *)tuning_meta_data_blob,
5086                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5087    }
5088
5089    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5090            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5091        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5092                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5093                NEUTRAL_COL_POINTS);
5094    }
5095
5096    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5097        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5098        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5099    }
5100
5101    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5102        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5103        // Adjust crop region from sensor output coordinate system to active
5104        // array coordinate system.
5105        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5106                hAeRegions->rect.width, hAeRegions->rect.height);
5107
5108        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5109        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5110                REGIONS_TUPLE_COUNT);
5111        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5112                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5113                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5114                hAeRegions->rect.height);
5115    }
5116
5117    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5118        uint8_t fwk_afState = (uint8_t) *afState;
5119        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5120        LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5121    }
5122
5123    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5124        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5125    }
5126
5127    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5128        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5129    }
5130
5131    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5132        uint8_t fwk_lensState = *lensState;
5133        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5134    }
5135
5136    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5137        /*af regions*/
5138        int32_t afRegions[REGIONS_TUPLE_COUNT];
5139        // Adjust crop region from sensor output coordinate system to active
5140        // array coordinate system.
5141        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5142                hAfRegions->rect.width, hAfRegions->rect.height);
5143
5144        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5145        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5146                REGIONS_TUPLE_COUNT);
5147        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5148                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5149                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5150                hAfRegions->rect.height);
5151    }
5152
5153    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5154        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5155                *hal_ab_mode);
5156        if (NAME_NOT_FOUND != val) {
5157            uint8_t fwk_ab_mode = (uint8_t)val;
5158            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5159        }
5160    }
5161
5162    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5163        int val = lookupFwkName(SCENE_MODES_MAP,
5164                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5165        if (NAME_NOT_FOUND != val) {
5166            uint8_t fwkBestshotMode = (uint8_t)val;
5167            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5168            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5169        } else {
5170            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5171        }
5172    }
5173
5174    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5175         uint8_t fwk_mode = (uint8_t) *mode;
5176         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5177    }
5178
5179    /* Constant metadata values to be update*/
5180    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5181    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5182
5183    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5184    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5185
5186    int32_t hotPixelMap[2];
5187    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5188
5189    // CDS
5190    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5191        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5192    }
5193
5194    // TNR
5195    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5196        uint8_t tnr_enable       = tnr->denoise_enable;
5197        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5198
5199        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5200        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5201    }
5202
5203    // Reprocess crop data
5204    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5205        uint8_t cnt = crop_data->num_of_streams;
5206        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5207            // mm-qcamera-daemon only posts crop_data for streams
5208            // not linked to pproc. So no valid crop metadata is not
5209            // necessarily an error case.
5210            LOGD("No valid crop metadata entries");
5211        } else {
5212            uint32_t reproc_stream_id;
5213            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5214                LOGD("No reprocessible stream found, ignore crop data");
5215            } else {
5216                int rc = NO_ERROR;
5217                Vector<int32_t> roi_map;
5218                int32_t *crop = new int32_t[cnt*4];
5219                if (NULL == crop) {
5220                   rc = NO_MEMORY;
5221                }
5222                if (NO_ERROR == rc) {
5223                    int32_t streams_found = 0;
5224                    for (size_t i = 0; i < cnt; i++) {
5225                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5226                            if (pprocDone) {
5227                                // HAL already does internal reprocessing,
5228                                // either via reprocessing before JPEG encoding,
5229                                // or offline postprocessing for pproc bypass case.
5230                                crop[0] = 0;
5231                                crop[1] = 0;
5232                                crop[2] = mInputStreamInfo.dim.width;
5233                                crop[3] = mInputStreamInfo.dim.height;
5234                            } else {
5235                                crop[0] = crop_data->crop_info[i].crop.left;
5236                                crop[1] = crop_data->crop_info[i].crop.top;
5237                                crop[2] = crop_data->crop_info[i].crop.width;
5238                                crop[3] = crop_data->crop_info[i].crop.height;
5239                            }
5240                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5241                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5242                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5243                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5244                            streams_found++;
5245                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5246                                    crop[0], crop[1], crop[2], crop[3]);
5247                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5248                                    crop_data->crop_info[i].roi_map.left,
5249                                    crop_data->crop_info[i].roi_map.top,
5250                                    crop_data->crop_info[i].roi_map.width,
5251                                    crop_data->crop_info[i].roi_map.height);
5252                            break;
5253
5254                       }
5255                    }
5256                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5257                            &streams_found, 1);
5258                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5259                            crop, (size_t)(streams_found * 4));
5260                    if (roi_map.array()) {
5261                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5262                                roi_map.array(), roi_map.size());
5263                    }
5264               }
5265               if (crop) {
5266                   delete [] crop;
5267               }
5268            }
5269        }
5270    }
5271
5272    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5273        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5274        // so hardcoding the CAC result to OFF mode.
5275        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5276        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5277    } else {
5278        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5279            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5280                    *cacMode);
5281            if (NAME_NOT_FOUND != val) {
5282                uint8_t resultCacMode = (uint8_t)val;
5283                // check whether CAC result from CB is equal to Framework set CAC mode
5284                // If not equal then set the CAC mode came in corresponding request
5285                if (fwk_cacMode != resultCacMode) {
5286                    resultCacMode = fwk_cacMode;
5287                }
5288                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5289                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5290            } else {
5291                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5292            }
5293        }
5294    }
5295
5296    // Post blob of cam_cds_data through vendor tag.
5297    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5298        uint8_t cnt = cdsInfo->num_of_streams;
5299        cam_cds_data_t cdsDataOverride;
5300        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5301        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5302        cdsDataOverride.num_of_streams = 1;
5303        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5304            uint32_t reproc_stream_id;
5305            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5306                LOGD("No reprocessible stream found, ignore cds data");
5307            } else {
5308                for (size_t i = 0; i < cnt; i++) {
5309                    if (cdsInfo->cds_info[i].stream_id ==
5310                            reproc_stream_id) {
5311                        cdsDataOverride.cds_info[0].cds_enable =
5312                                cdsInfo->cds_info[i].cds_enable;
5313                        break;
5314                    }
5315                }
5316            }
5317        } else {
5318            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5319        }
5320        camMetadata.update(QCAMERA3_CDS_INFO,
5321                (uint8_t *)&cdsDataOverride,
5322                sizeof(cam_cds_data_t));
5323    }
5324
5325    // Ldaf calibration data
5326    if (!mLdafCalibExist) {
5327        IF_META_AVAILABLE(uint32_t, ldafCalib,
5328                CAM_INTF_META_LDAF_EXIF, metadata) {
5329            mLdafCalibExist = true;
5330            mLdafCalib[0] = ldafCalib[0];
5331            mLdafCalib[1] = ldafCalib[1];
5332            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5333                    ldafCalib[0], ldafCalib[1]);
5334        }
5335    }
5336
5337    resultMetadata = camMetadata.release();
5338    return resultMetadata;
5339}
5340
5341/*===========================================================================
5342 * FUNCTION   : saveExifParams
5343 *
 * DESCRIPTION: Caches the AE/AWB/AF/ASD/stats EXIF debug parameter blobs
 *              from a metadata callback into mExifParams for later use
 *              during JPEG encoding.
5345 *
5346 * PARAMETERS :
5347 *   @metadata : metadata information from callback
5348 *
5349 * RETURN     : none
5350 *
5351 *==========================================================================*/
5352void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5353{
5354    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5355            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5356        if (mExifParams.debug_params) {
5357            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
5358            mExifParams.debug_params->ae_debug_params_valid = TRUE;
5359        }
5360    }
5361    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5362            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5363        if (mExifParams.debug_params) {
5364            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
5365            mExifParams.debug_params->awb_debug_params_valid = TRUE;
5366        }
5367    }
5368    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5369            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5370        if (mExifParams.debug_params) {
5371            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
5372            mExifParams.debug_params->af_debug_params_valid = TRUE;
5373        }
5374    }
5375    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5376            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5377        if (mExifParams.debug_params) {
5378            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
5379            mExifParams.debug_params->asd_debug_params_valid = TRUE;
5380        }
5381    }
5382    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5383            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5384        if (mExifParams.debug_params) {
5385            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
5386            mExifParams.debug_params->stats_debug_params_valid = TRUE;
5387        }
5388    }
5389}
5390
5391/*===========================================================================
5392 * FUNCTION   : get3AExifParams
5393 *
 * DESCRIPTION: Returns the cached EXIF parameters that were collected
 *              from metadata callbacks (see saveExifParams).
5395 *
5396 * PARAMETERS : none
5397 *
5398 *
5399 * RETURN     : mm_jpeg_exif_params_t
5400 *
5401 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns mExifParams by value. Note: the debug_params member is a
    // pointer (see saveExifParams), so the returned copy shares that
    // debug-parameter storage with this object rather than deep-copying it.
    return mExifParams;
}
5406
5407/*===========================================================================
5408 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5409 *
 * DESCRIPTION: Translates the urgent (partial) 3A results — AWB/AE/AF
 *              states, triggers and modes — from a HAL metadata callback
 *              into framework result metadata.
5411 *
5412 * PARAMETERS :
5413 *   @metadata : metadata information from callback
5414 *
5415 * RETURN     : camera_metadata_t*
5416 *              metadata in a format specified by fwk
5417 *==========================================================================*/
5418camera_metadata_t*
5419QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
5420                                (metadata_buffer_t *metadata)
5421{
5422    CameraMetadata camMetadata;
5423    camera_metadata_t *resultMetadata;
5424
5425
5426    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
5427        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
5428        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
5429        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
5430    }
5431
5432    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
5433        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
5434                &aecTrigger->trigger, 1);
5435        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
5436                &aecTrigger->trigger_id, 1);
5437        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
5438                 aecTrigger->trigger);
5439        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
5440                aecTrigger->trigger_id);
5441    }
5442
5443    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
5444        uint8_t fwk_ae_state = (uint8_t) *ae_state;
5445        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
5446        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
5447    }
5448
5449    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5450        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5451        if (NAME_NOT_FOUND != val) {
5452            uint8_t fwkAfMode = (uint8_t)val;
5453            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5454            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5455        } else {
5456            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5457                    val);
5458        }
5459    }
5460
5461    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
5462        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
5463                &af_trigger->trigger, 1);
5464        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
5465                 af_trigger->trigger);
5466        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
5467        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
5468                af_trigger->trigger_id);
5469    }
5470
5471    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
5472        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
5473                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
5474        if (NAME_NOT_FOUND != val) {
5475            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
5476            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
5477            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
5478        } else {
5479            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
5480        }
5481    }
5482
5483    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
5484    uint32_t aeMode = CAM_AE_MODE_MAX;
5485    int32_t flashMode = CAM_FLASH_MODE_MAX;
5486    int32_t redeye = -1;
5487    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
5488        aeMode = *pAeMode;
5489    }
5490    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
5491        flashMode = *pFlashMode;
5492    }
5493    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
5494        redeye = *pRedeye;
5495    }
5496
5497    if (1 == redeye) {
5498        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
5499        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5500    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
5501        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
5502                flashMode);
5503        if (NAME_NOT_FOUND != val) {
5504            fwk_aeMode = (uint8_t)val;
5505            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5506        } else {
5507            LOGE("Unsupported flash mode %d", flashMode);
5508        }
5509    } else if (aeMode == CAM_AE_MODE_ON) {
5510        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
5511        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5512    } else if (aeMode == CAM_AE_MODE_OFF) {
5513        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
5514        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5515    } else {
5516        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
5517              "flashMode:%d, aeMode:%u!!!",
5518                 redeye, flashMode, aeMode);
5519    }
5520
5521    resultMetadata = camMetadata.release();
5522    return resultMetadata;
5523}
5524
5525/*===========================================================================
5526 * FUNCTION   : dumpMetadataToFile
5527 *
5528 * DESCRIPTION: Dumps tuning metadata to file system
5529 *
5530 * PARAMETERS :
5531 *   @meta           : tuning metadata
5532 *   @dumpFrameCount : current dump frame count
5533 *   @enabled        : Enable mask
5534 *
5535 *==========================================================================*/
5536void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5537                                                   uint32_t &dumpFrameCount,
5538                                                   bool enabled,
5539                                                   const char *type,
5540                                                   uint32_t frameNumber)
5541{
5542    //Some sanity checks
5543    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5544        LOGE("Tuning sensor data size bigger than expected %d: %d",
5545              meta.tuning_sensor_data_size,
5546              TUNING_SENSOR_DATA_MAX);
5547        return;
5548    }
5549
5550    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5551        LOGE("Tuning VFE data size bigger than expected %d: %d",
5552              meta.tuning_vfe_data_size,
5553              TUNING_VFE_DATA_MAX);
5554        return;
5555    }
5556
5557    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5558        LOGE("Tuning CPP data size bigger than expected %d: %d",
5559              meta.tuning_cpp_data_size,
5560              TUNING_CPP_DATA_MAX);
5561        return;
5562    }
5563
5564    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5565        LOGE("Tuning CAC data size bigger than expected %d: %d",
5566              meta.tuning_cac_data_size,
5567              TUNING_CAC_DATA_MAX);
5568        return;
5569    }
5570    //
5571
5572    if(enabled){
5573        char timeBuf[FILENAME_MAX];
5574        char buf[FILENAME_MAX];
5575        memset(buf, 0, sizeof(buf));
5576        memset(timeBuf, 0, sizeof(timeBuf));
5577        time_t current_time;
5578        struct tm * timeinfo;
5579        time (&current_time);
5580        timeinfo = localtime (&current_time);
5581        if (timeinfo != NULL) {
5582            strftime (timeBuf, sizeof(timeBuf),
5583                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5584        }
5585        String8 filePath(timeBuf);
5586        snprintf(buf,
5587                sizeof(buf),
5588                "%dm_%s_%d.bin",
5589                dumpFrameCount,
5590                type,
5591                frameNumber);
5592        filePath.append(buf);
5593        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5594        if (file_fd >= 0) {
5595            ssize_t written_len = 0;
5596            meta.tuning_data_version = TUNING_DATA_VERSION;
5597            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5598            written_len += write(file_fd, data, sizeof(uint32_t));
5599            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5600            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5601            written_len += write(file_fd, data, sizeof(uint32_t));
5602            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5603            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5604            written_len += write(file_fd, data, sizeof(uint32_t));
5605            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5606            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5607            written_len += write(file_fd, data, sizeof(uint32_t));
5608            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5609            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5610            written_len += write(file_fd, data, sizeof(uint32_t));
5611            meta.tuning_mod3_data_size = 0;
5612            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5613            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5614            written_len += write(file_fd, data, sizeof(uint32_t));
5615            size_t total_size = meta.tuning_sensor_data_size;
5616            data = (void *)((uint8_t *)&meta.data);
5617            written_len += write(file_fd, data, total_size);
5618            total_size = meta.tuning_vfe_data_size;
5619            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5620            written_len += write(file_fd, data, total_size);
5621            total_size = meta.tuning_cpp_data_size;
5622            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5623            written_len += write(file_fd, data, total_size);
5624            total_size = meta.tuning_cac_data_size;
5625            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5626            written_len += write(file_fd, data, total_size);
5627            close(file_fd);
5628        }else {
5629            LOGE("fail to open file for metadata dumping");
5630        }
5631    }
5632}
5633
5634/*===========================================================================
5635 * FUNCTION   : cleanAndSortStreamInfo
5636 *
5637 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5638 *              and sort them such that raw stream is at the end of the list
5639 *              This is a workaround for camera daemon constraint.
5640 *
5641 * PARAMETERS : None
5642 *
5643 *==========================================================================*/
5644void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5645{
5646    List<stream_info_t *> newStreamInfo;
5647
5648    /*clean up invalid streams*/
5649    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5650            it != mStreamInfo.end();) {
5651        if(((*it)->status) == INVALID){
5652            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5653            delete channel;
5654            free(*it);
5655            it = mStreamInfo.erase(it);
5656        } else {
5657            it++;
5658        }
5659    }
5660
5661    // Move preview/video/callback/snapshot streams into newList
5662    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5663            it != mStreamInfo.end();) {
5664        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5665                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5666                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5667            newStreamInfo.push_back(*it);
5668            it = mStreamInfo.erase(it);
5669        } else
5670            it++;
5671    }
5672    // Move raw streams into newList
5673    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5674            it != mStreamInfo.end();) {
5675        newStreamInfo.push_back(*it);
5676        it = mStreamInfo.erase(it);
5677    }
5678
5679    mStreamInfo = newStreamInfo;
5680}
5681
5682/*===========================================================================
5683 * FUNCTION   : extractJpegMetadata
5684 *
5685 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5686 *              JPEG metadata is cached in HAL, and return as part of capture
5687 *              result when metadata is returned from camera daemon.
5688 *
5689 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5690 *              @request:      capture request
5691 *
5692 *==========================================================================*/
5693void QCamera3HardwareInterface::extractJpegMetadata(
5694        CameraMetadata& jpegMetadata,
5695        const camera3_capture_request_t *request)
5696{
5697    CameraMetadata frame_settings;
5698    frame_settings = request->settings;
5699
5700    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5701        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5702                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5703                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5704
5705    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5706        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5707                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5708                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5709
5710    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5711        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5712                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5713                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5714
5715    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5716        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5717                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5718                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5719
5720    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5721        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5722                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5723                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5724
5725    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5726        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5727                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5728                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5729
5730    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5731        int32_t thumbnail_size[2];
5732        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5733        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5734        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5735            int32_t orientation =
5736                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5737            if ((orientation == 90) || (orientation == 270)) {
5738               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5739               int32_t temp;
5740               temp = thumbnail_size[0];
5741               thumbnail_size[0] = thumbnail_size[1];
5742               thumbnail_size[1] = temp;
5743            }
5744         }
5745         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5746                thumbnail_size,
5747                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5748    }
5749
5750}
5751
5752/*===========================================================================
5753 * FUNCTION   : convertToRegions
5754 *
5755 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5756 *
5757 * PARAMETERS :
5758 *   @rect   : cam_rect_t struct to convert
5759 *   @region : int32_t destination array
5760 *   @weight : if we are converting from cam_area_t, weight is valid
5761 *             else weight = -1
5762 *
5763 *==========================================================================*/
5764void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5765        int32_t *region, int weight)
5766{
5767    region[0] = rect.left;
5768    region[1] = rect.top;
5769    region[2] = rect.left + rect.width;
5770    region[3] = rect.top + rect.height;
5771    if (weight > -1) {
5772        region[4] = weight;
5773    }
5774}
5775
5776/*===========================================================================
5777 * FUNCTION   : convertFromRegions
5778 *
 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t struct to populate
 *   @settings : capture request metadata to read the region from
 *   @tag      : metadata tag whose value is [xmin, ymin, xmax, ymax, weight]
5786 *
5787 *==========================================================================*/
5788void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5789        const camera_metadata_t *settings, uint32_t tag)
5790{
5791    CameraMetadata frame_settings;
5792    frame_settings = settings;
5793    int32_t x_min = frame_settings.find(tag).data.i32[0];
5794    int32_t y_min = frame_settings.find(tag).data.i32[1];
5795    int32_t x_max = frame_settings.find(tag).data.i32[2];
5796    int32_t y_max = frame_settings.find(tag).data.i32[3];
5797    roi.weight = frame_settings.find(tag).data.i32[4];
5798    roi.rect.left = x_min;
5799    roi.rect.top = y_min;
5800    roi.rect.width = x_max - x_min;
5801    roi.rect.height = y_max - y_min;
5802}
5803
5804/*===========================================================================
5805 * FUNCTION   : resetIfNeededROI
5806 *
5807 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5808 *              crop region
5809 *
5810 * PARAMETERS :
5811 *   @roi       : cam_area_t struct to resize
5812 *   @scalerCropRegion : cam_crop_region_t region to compare against
5813 *
5814 *
5815 *==========================================================================*/
5816bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5817                                                 const cam_crop_region_t* scalerCropRegion)
5818{
5819    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5820    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5821    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5822    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5823
5824    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5825     * without having this check the calculations below to validate if the roi
5826     * is inside scalar crop region will fail resulting in the roi not being
5827     * reset causing algorithm to continue to use stale roi window
5828     */
5829    if (roi->weight == 0) {
5830        return true;
5831    }
5832
5833    if ((roi_x_max < scalerCropRegion->left) ||
5834        // right edge of roi window is left of scalar crop's left edge
5835        (roi_y_max < scalerCropRegion->top)  ||
5836        // bottom edge of roi window is above scalar crop's top edge
5837        (roi->rect.left > crop_x_max) ||
5838        // left edge of roi window is beyond(right) of scalar crop's right edge
5839        (roi->rect.top > crop_y_max)){
5840        // top edge of roi windo is above scalar crop's top edge
5841        return false;
5842    }
5843    if (roi->rect.left < scalerCropRegion->left) {
5844        roi->rect.left = scalerCropRegion->left;
5845    }
5846    if (roi->rect.top < scalerCropRegion->top) {
5847        roi->rect.top = scalerCropRegion->top;
5848    }
5849    if (roi_x_max > crop_x_max) {
5850        roi_x_max = crop_x_max;
5851    }
5852    if (roi_y_max > crop_y_max) {
5853        roi_y_max = crop_y_max;
5854    }
5855    roi->rect.width = roi_x_max - roi->rect.left;
5856    roi->rect.height = roi_y_max - roi->rect.top;
5857    return true;
5858}
5859
5860/*===========================================================================
5861 * FUNCTION   : convertLandmarks
5862 *
5863 * DESCRIPTION: helper method to extract the landmarks from face detection info
5864 *
5865 * PARAMETERS :
5866 *   @landmark_data : input landmark data to be converted
5867 *   @landmarks : int32_t destination array
5868 *
5869 *
5870 *==========================================================================*/
5871void QCamera3HardwareInterface::convertLandmarks(
5872        cam_face_landmarks_info_t landmark_data,
5873        int32_t *landmarks)
5874{
5875    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
5876    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
5877    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
5878    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
5879    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
5880    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
5881}
5882
5883#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5884/*===========================================================================
5885 * FUNCTION   : initCapabilities
5886 *
5887 * DESCRIPTION: initialize camera capabilities in static data struct
5888 *
5889 * PARAMETERS :
5890 *   @cameraId  : camera Id
5891 *
5892 * RETURN     : int32_t type of status
5893 *              NO_ERROR  -- success
5894 *              none-zero failure code
5895 *==========================================================================*/
5896int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5897{
5898    int rc = 0;
5899    mm_camera_vtbl_t *cameraHandle = NULL;
5900    QCamera3HeapMemory *capabilityHeap = NULL;
5901
5902    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5903    if (rc) {
5904        LOGE("camera_open failed. rc = %d", rc);
5905        goto open_failed;
5906    }
5907    if (!cameraHandle) {
5908        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
5909        goto open_failed;
5910    }
5911
5912    capabilityHeap = new QCamera3HeapMemory(1);
5913    if (capabilityHeap == NULL) {
5914        LOGE("creation of capabilityHeap failed");
5915        goto heap_creation_failed;
5916    }
5917    /* Allocate memory for capability buffer */
5918    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5919    if(rc != OK) {
5920        LOGE("No memory for cappability");
5921        goto allocate_failed;
5922    }
5923
5924    /* Map memory for capability buffer */
5925    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5926    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5927                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5928                                capabilityHeap->getFd(0),
5929                                sizeof(cam_capability_t));
5930    if(rc < 0) {
5931        LOGE("failed to map capability buffer");
5932        goto map_failed;
5933    }
5934
5935    /* Query Capability */
5936    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5937    if(rc < 0) {
5938        LOGE("failed to query capability");
5939        goto query_failed;
5940    }
5941    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5942    if (!gCamCapability[cameraId]) {
5943        LOGE("out of memory");
5944        goto query_failed;
5945    }
5946    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5947                                        sizeof(cam_capability_t));
5948    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_x = 0;
5949    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_y = 0;
5950    rc = 0;
5951
5952query_failed:
5953    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5954                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5955map_failed:
5956    capabilityHeap->deallocate();
5957allocate_failed:
5958    delete capabilityHeap;
5959heap_creation_failed:
5960    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5961    cameraHandle = NULL;
5962open_failed:
5963    return rc;
5964}
5965
5966/*==========================================================================
 * FUNCTION   : get3AVersion
5968 *
5969 * DESCRIPTION: get the Q3A S/W version
5970 *
5971 * PARAMETERS :
5972 *  @sw_version: Reference of Q3A structure which will hold version info upon
5973 *               return
5974 *
5975 * RETURN     : None
5976 *
5977 *==========================================================================*/
5978void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5979{
5980    if(gCamCapability[mCameraId])
5981        sw_version = gCamCapability[mCameraId]->q3a_version;
5982    else
5983        LOGE("Capability structure NULL!");
5984}
5985
5986
5987/*===========================================================================
5988 * FUNCTION   : initParameters
5989 *
5990 * DESCRIPTION: initialize camera parameters
5991 *
5992 * PARAMETERS :
5993 *
5994 * RETURN     : int32_t type of status
5995 *              NO_ERROR  -- success
5996 *              none-zero failure code
5997 *==========================================================================*/
5998int QCamera3HardwareInterface::initParameters()
5999{
6000    int rc = 0;
6001
6002    //Allocate Set Param Buffer
6003    mParamHeap = new QCamera3HeapMemory(1);
6004    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6005    if(rc != OK) {
6006        rc = NO_MEMORY;
6007        LOGE("Failed to allocate SETPARM Heap memory");
6008        delete mParamHeap;
6009        mParamHeap = NULL;
6010        return rc;
6011    }
6012
6013    //Map memory for parameters buffer
6014    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6015            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6016            mParamHeap->getFd(0),
6017            sizeof(metadata_buffer_t));
6018    if(rc < 0) {
6019        LOGE("failed to map SETPARM buffer");
6020        rc = FAILED_TRANSACTION;
6021        mParamHeap->deallocate();
6022        delete mParamHeap;
6023        mParamHeap = NULL;
6024        return rc;
6025    }
6026
6027    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6028
6029    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6030    return rc;
6031}
6032
6033/*===========================================================================
6034 * FUNCTION   : deinitParameters
6035 *
6036 * DESCRIPTION: de-initialize camera parameters
6037 *
6038 * PARAMETERS :
6039 *
6040 * RETURN     : NONE
6041 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the camera backend before releasing
    // the heap that backs it; the teardown order mirrors initParameters().
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap buffer freed above; clear the alias.
    mParameters = NULL;

    // mPrevParameters was a separate malloc'd shadow copy (see initParameters).
    free(mPrevParameters);
    mPrevParameters = NULL;
}
6056
6057/*===========================================================================
6058 * FUNCTION   : calcMaxJpegSize
6059 *
6060 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6061 *
6062 * PARAMETERS :
6063 *
6064 * RETURN     : max_jpeg_size
6065 *==========================================================================*/
6066size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6067{
6068    size_t max_jpeg_size = 0;
6069    size_t temp_width, temp_height;
6070    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6071            MAX_SIZES_CNT);
6072    for (size_t i = 0; i < count; i++) {
6073        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6074        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6075        if (temp_width * temp_height > max_jpeg_size ) {
6076            max_jpeg_size = temp_width * temp_height;
6077        }
6078    }
6079    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6080    return max_jpeg_size;
6081}
6082
6083/*===========================================================================
6084 * FUNCTION   : getMaxRawSize
6085 *
6086 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6087 *
6088 * PARAMETERS :
6089 *
6090 * RETURN     : Largest supported Raw Dimension
6091 *==========================================================================*/
6092cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6093{
6094    int max_width = 0;
6095    cam_dimension_t maxRawSize;
6096
6097    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6098    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6099        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6100            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6101            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6102        }
6103    }
6104    return maxRawSize;
6105}
6106
6107
6108/*===========================================================================
6109 * FUNCTION   : calcMaxJpegDim
6110 *
6111 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6112 *
6113 * PARAMETERS :
6114 *
6115 * RETURN     : max_jpeg_dim
6116 *==========================================================================*/
6117cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6118{
6119    cam_dimension_t max_jpeg_dim;
6120    cam_dimension_t curr_jpeg_dim;
6121    max_jpeg_dim.width = 0;
6122    max_jpeg_dim.height = 0;
6123    curr_jpeg_dim.width = 0;
6124    curr_jpeg_dim.height = 0;
6125    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6126        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6127        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6128        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6129            max_jpeg_dim.width * max_jpeg_dim.height ) {
6130            max_jpeg_dim.width = curr_jpeg_dim.width;
6131            max_jpeg_dim.height = curr_jpeg_dim.height;
6132        }
6133    }
6134    return max_jpeg_dim;
6135}
6136
6137/*===========================================================================
6138 * FUNCTION   : addStreamConfig
6139 *
6140 * DESCRIPTION: adds the stream configuration to the array
6141 *
6142 * PARAMETERS :
6143 * @available_stream_configs : pointer to stream configuration array
6144 * @scalar_format            : scalar format
6145 * @dim                      : configuration dimension
6146 * @config_type              : input or output configuration type
6147 *
6148 * RETURN     : NONE
6149 *==========================================================================*/
6150void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6151        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6152{
6153    available_stream_configs.add(scalar_format);
6154    available_stream_configs.add(dim.width);
6155    available_stream_configs.add(dim.height);
6156    available_stream_configs.add(config_type);
6157}
6158
6159/*===========================================================================
 * FUNCTION   : supportBurstCapture
6161 *
6162 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6163 *
6164 * PARAMETERS :
6165 *   @cameraId  : camera Id
6166 *
6167 * RETURN     : true if camera supports BURST_CAPTURE
6168 *              false otherwise
6169 *==========================================================================*/
6170bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6171{
6172    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6173    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6174    const int32_t highResWidth = 3264;
6175    const int32_t highResHeight = 2448;
6176
6177    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6178        // Maximum resolution images cannot be captured at >= 10fps
6179        // -> not supporting BURST_CAPTURE
6180        return false;
6181    }
6182
6183    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6184        // Maximum resolution images can be captured at >= 20fps
6185        // --> supporting BURST_CAPTURE
6186        return true;
6187    }
6188
6189    // Find the smallest highRes resolution, or largest resolution if there is none
6190    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6191            MAX_SIZES_CNT);
6192    size_t highRes = 0;
6193    while ((highRes + 1 < totalCnt) &&
6194            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6195            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6196            highResWidth * highResHeight)) {
6197        highRes++;
6198    }
6199    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6200        return true;
6201    } else {
6202        return false;
6203    }
6204}
6205
6206/*===========================================================================
6207 * FUNCTION   : initStaticMetadata
6208 *
6209 * DESCRIPTION: initialize the static metadata
6210 *
6211 * PARAMETERS :
6212 *   @cameraId  : camera Id
6213 *
6214 * RETURN     : int32_t type of status
6215 *              0  -- success
6216 *              non-zero failure code
6217 *==========================================================================*/
6218int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6219{
6220    int rc = 0;
6221    CameraMetadata staticInfo;
6222    size_t count = 0;
6223    bool limitedDevice = false;
6224    char prop[PROPERTY_VALUE_MAX];
6225    bool supportBurst = false;
6226
6227    supportBurst = supportBurstCapture(cameraId);
6228
6229    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6230     * guaranteed or if min fps of max resolution is less than 20 fps, its
6231     * advertised as limited device*/
6232    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6233            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6234            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6235            !supportBurst;
6236
6237    uint8_t supportedHwLvl = limitedDevice ?
6238            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6239            // LEVEL_3 - This device will support level 3.
6240            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6241
6242    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6243            &supportedHwLvl, 1);
6244
6245    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6246    /*HAL 3 only*/
6247    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6248                    &gCamCapability[cameraId]->min_focus_distance, 1);
6249
6250    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6251                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6252
6253    /*should be using focal lengths but sensor doesn't provide that info now*/
6254    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6255                      &gCamCapability[cameraId]->focal_length,
6256                      1);
6257
6258    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6259            gCamCapability[cameraId]->apertures,
6260            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6261
6262    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6263            gCamCapability[cameraId]->filter_densities,
6264            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6265
6266
6267    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6268            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6269            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6270
6271    int32_t lens_shading_map_size[] = {
6272            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6273            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6274    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6275                      lens_shading_map_size,
6276                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6277
6278    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6279            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6280
6281    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6282            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6283
6284    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6285            &gCamCapability[cameraId]->max_frame_duration, 1);
6286
6287    camera_metadata_rational baseGainFactor = {
6288            gCamCapability[cameraId]->base_gain_factor.numerator,
6289            gCamCapability[cameraId]->base_gain_factor.denominator};
6290    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6291                      &baseGainFactor, 1);
6292
6293    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6294                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6295
6296    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6297            gCamCapability[cameraId]->pixel_array_size.height};
6298    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6299                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6300
6301    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6302            gCamCapability[cameraId]->active_array_size.top,
6303            gCamCapability[cameraId]->active_array_size.width,
6304            gCamCapability[cameraId]->active_array_size.height};
6305    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6306            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6307
6308    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6309            &gCamCapability[cameraId]->white_level, 1);
6310
6311    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6312            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6313
6314    bool hasBlackRegions = false;
6315    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6316        LOGW("black_region_count: %d is bounded to %d",
6317            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6318        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6319    }
6320    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6321        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6322        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6323            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6324        }
6325        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6326                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6327        hasBlackRegions = true;
6328    }
6329
6330    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6331            &gCamCapability[cameraId]->flash_charge_duration, 1);
6332
6333    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6334            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6335
6336    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
6337    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6338            &timestampSource, 1);
6339
6340    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6341            &gCamCapability[cameraId]->histogram_size, 1);
6342
6343    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6344            &gCamCapability[cameraId]->max_histogram_count, 1);
6345
6346    int32_t sharpness_map_size[] = {
6347            gCamCapability[cameraId]->sharpness_map_size.width,
6348            gCamCapability[cameraId]->sharpness_map_size.height};
6349
6350    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6351            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6352
6353    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6354            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6355
6356    int32_t scalar_formats[] = {
6357            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6358            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6359            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6360            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6361            HAL_PIXEL_FORMAT_RAW10,
6362            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6363    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6364    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6365                      scalar_formats,
6366                      scalar_formats_count);
6367
6368    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6369    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6370    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6371            count, MAX_SIZES_CNT, available_processed_sizes);
6372    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6373            available_processed_sizes, count * 2);
6374
6375    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6376    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6377    makeTable(gCamCapability[cameraId]->raw_dim,
6378            count, MAX_SIZES_CNT, available_raw_sizes);
6379    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6380            available_raw_sizes, count * 2);
6381
6382    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6383    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6384    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6385            count, MAX_SIZES_CNT, available_fps_ranges);
6386    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6387            available_fps_ranges, count * 2);
6388
6389    camera_metadata_rational exposureCompensationStep = {
6390            gCamCapability[cameraId]->exp_compensation_step.numerator,
6391            gCamCapability[cameraId]->exp_compensation_step.denominator};
6392    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6393                      &exposureCompensationStep, 1);
6394
6395    Vector<uint8_t> availableVstabModes;
6396    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6397    char eis_prop[PROPERTY_VALUE_MAX];
6398    memset(eis_prop, 0, sizeof(eis_prop));
6399    property_get("persist.camera.eis.enable", eis_prop, "0");
6400    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6401    if (facingBack && eis_prop_set) {
6402        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6403    }
6404    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6405                      availableVstabModes.array(), availableVstabModes.size());
6406
6407    /*HAL 1 and HAL 3 common*/
6408    float maxZoom = 4;
6409    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6410            &maxZoom, 1);
6411
6412    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6413    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6414
6415    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6416    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6417        max3aRegions[2] = 0; /* AF not supported */
6418    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6419            max3aRegions, 3);
6420
6421    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6422    memset(prop, 0, sizeof(prop));
6423    property_get("persist.camera.facedetect", prop, "1");
6424    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6425    LOGD("Support face detection mode: %d",
6426             supportedFaceDetectMode);
6427
6428    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6429    Vector<uint8_t> availableFaceDetectModes;
6430    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6431    if (supportedFaceDetectMode == 1) {
6432        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6433    } else if (supportedFaceDetectMode == 2) {
6434        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6435    } else if (supportedFaceDetectMode == 3) {
6436        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6437        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6438    } else {
6439        maxFaces = 0;
6440    }
// NOTE(review): this span is the interior of a large static-metadata init
// routine; staticInfo is presumably an android::CameraMetadata and
// gCamCapability[cameraId] the per-camera capability table filled earlier
// in the file -- confirm against the full function.
// Publish the face-detect modes gathered above plus the matching maximum
// face count (set to 0 just before this span when FD is unsupported).
6441    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6442            availableFaceDetectModes.array(),
6443            availableFaceDetectModes.size());
6444    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6445            (int32_t *)&maxFaces, 1);
6446
// AE exposure-compensation range straight from the capability struct.
6447    int32_t exposureCompensationRange[] = {
6448            gCamCapability[cameraId]->exposure_compensation_min,
6449            gCamCapability[cameraId]->exposure_compensation_max};
6450    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6451            exposureCompensationRange,
6452            sizeof(exposureCompensationRange)/sizeof(int32_t));
6453
// facingBack is computed earlier in the function; map it to the framework enum.
6454    uint8_t lensFacing = (facingBack) ?
6455            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6456    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6457
6458    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6459                      available_thumbnail_sizes,
6460                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6461
6462    /*all sizes will be clubbed into this tag*/
6463    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6464    /*android.scaler.availableStreamConfigurations*/
6465    Vector<int32_t> available_stream_configs;
6466    cam_dimension_t active_array_dim;
6467    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6468    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6469    /* Add input/output stream configurations for each scalar formats*/
// For every advertised scaler format, append (format, w, h, direction)
// quadruples via addStreamConfig(). RAW formats use the raw dimension
// table; BLOB and YUV/implementation-defined use the picture-size table.
6470    for (size_t j = 0; j < scalar_formats_count; j++) {
6471        switch (scalar_formats[j]) {
6472        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6473        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6474        case HAL_PIXEL_FORMAT_RAW10:
6475            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6476                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6477                addStreamConfig(available_stream_configs, scalar_formats[j],
6478                        gCamCapability[cameraId]->raw_dim[i],
6479                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6480            }
6481            break;
6482        case HAL_PIXEL_FORMAT_BLOB:
6483            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6484                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6485                addStreamConfig(available_stream_configs, scalar_formats[j],
6486                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6487                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6488            }
6489            break;
6490        case HAL_PIXEL_FORMAT_YCbCr_420_888:
6491        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6492        default:
6493            cam_dimension_t largest_picture_size;
6494            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6495            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6496                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6497                addStreamConfig(available_stream_configs, scalar_formats[j],
6498                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6499                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6500                /* Book keep largest */
// Track the largest picture size; only a size that dominates in BOTH
// dimensions replaces the current candidate.
6501                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6502                        >= largest_picture_size.width &&
6503                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
6504                        >= largest_picture_size.height)
6505                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6506            }
6507            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
// Only the single largest size is advertised as an INPUT (reprocess) config.
6508            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6509                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6510                 addStreamConfig(available_stream_configs, scalar_formats[j],
6511                         largest_picture_size,
6512                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6513            }
6514            break;
6515        }
6516    }
6517
6518    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6519                      available_stream_configs.array(), available_stream_configs.size());
// Fixed defaults for the hot-pixel request/statistics tags.
6520    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6521    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6522
6523    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6524    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6525
6526    /* android.scaler.availableMinFrameDurations */
// Entries are appended as (format, width, height, min_duration) tuples,
// mirroring the format/size tables used for the stream configurations above.
6527    Vector<int64_t> available_min_durations;
6528    for (size_t j = 0; j < scalar_formats_count; j++) {
6529        switch (scalar_formats[j]) {
6530        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6531        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6532        case HAL_PIXEL_FORMAT_RAW10:
6533            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6534                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6535                available_min_durations.add(scalar_formats[j]);
6536                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
6537                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
6538                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
6539            }
6540            break;
6541        default:
6542            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6543                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6544                available_min_durations.add(scalar_formats[j]);
6545                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
6546                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
6547                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
6548            }
6549            break;
6550        }
6551    }
6552    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6553                      available_min_durations.array(), available_min_durations.size());
6554
// Build the high-speed (HFR) video configuration list. Each supported HFR
// table entry is first mapped from the CAM_HFR_MODE_* enum to a numeric fps;
// unknown/OFF modes leave fps at 0 and are filtered out below.
6555    Vector<int32_t> available_hfr_configs;
6556    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
6557        int32_t fps = 0;
6558        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6559        case CAM_HFR_MODE_60FPS:
6560            fps = 60;
6561            break;
6562        case CAM_HFR_MODE_90FPS:
6563            fps = 90;
6564            break;
6565        case CAM_HFR_MODE_120FPS:
6566            fps = 120;
6567            break;
6568        case CAM_HFR_MODE_150FPS:
6569            fps = 150;
6570            break;
6571        case CAM_HFR_MODE_180FPS:
6572            fps = 180;
6573            break;
6574        case CAM_HFR_MODE_210FPS:
6575            fps = 210;
6576            break;
6577        case CAM_HFR_MODE_240FPS:
6578            fps = 240;
6579            break;
6580        case CAM_HFR_MODE_480FPS:
6581            fps = 480;
6582            break;
6583        case CAM_HFR_MODE_OFF:
6584        case CAM_HFR_MODE_MAX:
6585        default:
6586            break;
6587        }
6588
6589        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6590        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6591            /* For each HFR frame rate, need to advertise one variable fps range
6592             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
6593             * [120, 120]. While camcorder preview alone is running [30, 120] is
6594             * set by the app. When video recording is started, [120, 120] is
6595             * set. This way sensor configuration does not change when recording
6596             * is started */
6597
// Only advertise the default HFR sizes that fit inside the sensor's
// HFR dimension for this mode.
6598            size_t len = sizeof(default_hfr_video_sizes) / sizeof(default_hfr_video_sizes[0]);
6599            for (size_t j = 0; j < len; j++) {
6600                if ((default_hfr_video_sizes[j].width <= gCamCapability[cameraId]->hfr_tbl[i].dim.width) &&
6601                    (default_hfr_video_sizes[j].height <= gCamCapability[cameraId]->hfr_tbl[i].dim.height)) {
6602                    //TODO: Might need additional filtering based on VFE/CPP/CPU capabilities
6603
6604                    /* (width, height, fps_min, fps_max, batch_size_max) */
6605                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
6606                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
6607                    available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6608                    available_hfr_configs.add(fps);
6609                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6610
6611                    /* (width, height, fps_min, fps_max, batch_size_max) */
6612                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
6613                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
6614                    available_hfr_configs.add(fps);
6615                    available_hfr_configs.add(fps);
6616                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6617                }// if
6618            }// for (...; j < len;...)
6619       } //if (fps >= MIN_FPS_FOR_BATCH_MODE)
6620    }
6621    //Advertise HFR capability only if the property is set
// persist.camera.hal3hfr.enable defaults to "1" (enabled); the tag is also
// suppressed when no HFR config qualified (empty vector -> null array()).
6622    memset(prop, 0, sizeof(prop));
6623    property_get("persist.camera.hal3hfr.enable", prop, "1");
6624    uint8_t hfrEnable = (uint8_t)atoi(prop);
6625
6626    if(hfrEnable && available_hfr_configs.array()) {
6627        staticInfo.update(
6628                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6629                available_hfr_configs.array(), available_hfr_configs.size());
6630    }
6631
// Maximum JPEG blob size computed by the helper from the largest picture size.
6632    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6633    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6634                      &max_jpeg_size, 1);
6635
// Translate HAL effect enums into framework enums, keeping only the ones
// lookupFwkName() can map ('size' counts the successfully mapped entries).
6636    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6637    size_t size = 0;
6638    count = CAM_EFFECT_MODE_MAX;
6639    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6640    for (size_t i = 0; i < count; i++) {
6641        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6642                gCamCapability[cameraId]->supported_effects[i]);
6643        if (NAME_NOT_FOUND != val) {
6644            avail_effects[size] = (uint8_t)val;
6645            size++;
6646        }
6647    }
6648    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6649                      avail_effects,
6650                      size);
6651
// Scene modes: CAM_SCENE_MODE_OFF is skipped; supported_indexes remembers the
// HAL table index for each mapped mode so the overrides below line up.
6652    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6653    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6654    size_t supported_scene_modes_cnt = 0;
6655    count = CAM_SCENE_MODE_MAX;
6656    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6657    for (size_t i = 0; i < count; i++) {
6658        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6659                CAM_SCENE_MODE_OFF) {
6660            int val = lookupFwkName(SCENE_MODES_MAP,
6661                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
6662                    gCamCapability[cameraId]->supported_scene_modes[i]);
6663            if (NAME_NOT_FOUND != val) {
6664                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6665                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6666                supported_scene_modes_cnt++;
6667            }
6668        }
6669    }
6670    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6671                      avail_scene_modes,
6672                      supported_scene_modes_cnt);
6673
// Each scene mode gets a 3-tuple override (AE, AWB, AF) from the HAL table.
6674    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6675    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6676                      supported_scene_modes_cnt,
6677                      CAM_SCENE_MODE_MAX,
6678                      scene_mode_overrides,
6679                      supported_indexes,
6680                      cameraId);
6681
// NOTE(review): when no scene mode mapped, cnt is forced to 1 here AFTER
// AVAILABLE_SCENE_MODES was already published with cnt==0 above, and the
// overrides update below then reads 3 entries makeOverridesList never
// filled (it ran with cnt==0) -- verify intent against upstream history.
6682    if (supported_scene_modes_cnt == 0) {
6683        supported_scene_modes_cnt = 1;
6684        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6685    }
6686
6687    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6688            scene_mode_overrides, supported_scene_modes_cnt * 3);
6689
// All three top-level control modes are always advertised.
6690    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6691                                         ANDROID_CONTROL_MODE_AUTO,
6692                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6693    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6694            available_control_modes,
6695            3);
6696
// Antibanding modes: same HAL-enum -> framework-enum filtering pattern as
// the effects list above ('size' = number of successfully mapped modes).
6697    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6698    size = 0;
6699    count = CAM_ANTIBANDING_MODE_MAX;
6700    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6701    for (size_t i = 0; i < count; i++) {
6702        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6703                gCamCapability[cameraId]->supported_antibandings[i]);
6704        if (NAME_NOT_FOUND != val) {
6705            avail_antibanding_modes[size] = (uint8_t)val;
6706            size++;
6707        }
6708
6709    }
6710    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6711                      avail_antibanding_modes,
6712                      size);
6713
// Aberration correction: list is ordered OFF, FAST, HIGH_QUALITY so that
// publishing a prefix of length 1 advertises OFF only.
6714    uint8_t avail_abberation_modes[] = {
6715            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
6716            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
6717            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
6718    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6719    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6720    if (0 == count) {
6721        //  If no aberration correction modes are available for a device, this advertise OFF mode
6722        size = 1;
6723    } else {
6724        // If count is not zero then atleast one among the FAST or HIGH quality is supported
6725        // So, advertize all 3 modes if atleast any one mode is supported as per the
6726        // new M requirement
6727        size = 3;
6728    }
6729    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6730            avail_abberation_modes,
6731            size);
6732
// AF modes mapped from the HAL focus-mode table.
6733    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6734    size = 0;
6735    count = CAM_FOCUS_MODE_MAX;
6736    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6737    for (size_t i = 0; i < count; i++) {
6738        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6739                gCamCapability[cameraId]->supported_focus_modes[i]);
6740        if (NAME_NOT_FOUND != val) {
6741            avail_af_modes[size] = (uint8_t)val;
6742            size++;
6743        }
6744    }
6745    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6746                      avail_af_modes,
6747                      size);
6748
// AWB modes mapped from the HAL white-balance table.
6749    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6750    size = 0;
6751    count = CAM_WB_MODE_MAX;
6752    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6753    for (size_t i = 0; i < count; i++) {
6754        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6755                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6756                gCamCapability[cameraId]->supported_white_balances[i]);
6757        if (NAME_NOT_FOUND != val) {
6758            avail_awb_modes[size] = (uint8_t)val;
6759            size++;
6760        }
6761    }
6762    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6763                      avail_awb_modes,
6764                      size);
6765
// Flash firing power levels are copied through as-is (no framework mapping).
6766    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6767    count = CAM_FLASH_FIRING_LEVEL_MAX;
6768    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6769            count);
6770    for (size_t i = 0; i < count; i++) {
6771        available_flash_levels[i] =
6772                gCamCapability[cameraId]->supported_firing_levels[i];
6773    }
6774    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6775            available_flash_levels, count);
6776
// flashAvailable is reused below to decide whether the flash AE modes
// are added to ANDROID_CONTROL_AE_AVAILABLE_MODES.
6777    uint8_t flashAvailable;
6778    if (gCamCapability[cameraId]->flash_available)
6779        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6780    else
6781        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6782    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6783            &flashAvailable, 1);
6784
// AE modes: start with the HAL-reported set, then append the flash-backed
// modes only when a flash unit exists (flashAvailable is nonzero then).
6785    Vector<uint8_t> avail_ae_modes;
6786    count = CAM_AE_MODE_MAX;
6787    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6788    for (size_t i = 0; i < count; i++) {
6789        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6790    }
6791    if (flashAvailable) {
6792        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6793        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6794    }
6795    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6796                      avail_ae_modes.array(),
6797                      avail_ae_modes.size());
6798
// Sensor sensitivity (ISO) range and the analog-only upper bound.
6799    int32_t sensitivity_range[2];
6800    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6801    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6802    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6803                      sensitivity_range,
6804                      sizeof(sensitivity_range) / sizeof(int32_t));
6805
6806    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6807                      &gCamCapability[cameraId]->max_analog_sensitivity,
6808                      1);
6809
// Sensor mount angle doubles as the clockwise orientation tag.
6810    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6811    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6812                      &sensor_orientation,
6813                      1);
6814
// Per-type stream limits in the tag's required order: (raw, processed,
// stalling) -- here expressed via the HAL's stalling/processed/raw macros.
6815    int32_t max_output_streams[] = {
6816            MAX_STALLING_STREAMS,
6817            MAX_PROCESSED_STREAMS,
6818            MAX_RAW_STREAMS};
6819    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6820            max_output_streams,
6821            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6822
// No controllable LEDs: tag published with an explicit zero count.
6823    uint8_t avail_leds = 0;
6824    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6825                      &avail_leds, 0);
6826
// Focus-distance calibration is only published when the HAL value maps to a
// known framework enum.
6827    uint8_t focus_dist_calibrated;
6828    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6829            gCamCapability[cameraId]->focus_dist_calibrated);
6830    if (NAME_NOT_FOUND != val) {
6831        focus_dist_calibrated = (uint8_t)val;
6832        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6833                     &focus_dist_calibrated, 1);
6834    }
6835
// Test-pattern modes mapped through TEST_PATTERN_MAP, skipping unmappable ones.
6836    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6837    size = 0;
6838    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6839            MAX_TEST_PATTERN_CNT);
6840    for (size_t i = 0; i < count; i++) {
6841        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6842                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6843        if (NAME_NOT_FOUND != testpatternMode) {
6844            avail_testpattern_modes[size] = testpatternMode;
6845            size++;
6846        }
6847    }
6848    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6849                      avail_testpattern_modes,
6850                      size);
6851
// Pipeline depth = in-flight requests plus the HAL's startup/skip delays.
6852    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6853    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6854                      &max_pipeline_depth,
6855                      1);
6856
6857    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6858    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6859                      &partial_result_count,
6860                       1);
6861
// Worst-case extra frames a reprocess capture may stall the pipeline.
6862    int32_t max_stall_duration = MAX_REPROCESS_STALL;
6863    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6864
// Advertised device capabilities. BURST_CAPTURE depends on the supportBurst
// flag computed earlier; CONSTRAINED_HIGH_SPEED_VIDEO is gated by the same
// property + non-empty HFR list used for the high-speed configs tag above,
// and RAW is claimed only for non-YUV (bayer) sensors.
6865    Vector<uint8_t> available_capabilities;
6866    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6867    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6868    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6869    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6870    if (supportBurst) {
6871        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6872    }
6873    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6874    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6875    if (hfrEnable && available_hfr_configs.array()) {
6876        available_capabilities.add(
6877                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6878    }
6879
6880    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6881        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6882    }
6883    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6884            available_capabilities.array(),
6885            available_capabilities.size());
6886
6887    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
6888    //Assumption is that all bayer cameras support MANUAL_SENSOR.
6889    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6890            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6891
6892    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6893            &aeLockAvailable, 1);
6894
6895    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
6896    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
6897    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6898            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6899
6900    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6901            &awbLockAvailable, 1);
6902
// One reprocess input stream at a time.
6903    int32_t max_input_streams = 1;
6904    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6905                      &max_input_streams,
6906                      1);
6907
6908    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
// Both reprocess input formats (implementation-defined and YUV_420_888)
// can produce BLOB or YUV_420_888 outputs.
6909    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6910            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6911            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6912            HAL_PIXEL_FORMAT_YCbCr_420_888};
6913    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6914                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6915
6916    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6917    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6918                      &max_latency,
6919                      1);
6920
// Post-RAW (ISP digital gain) sensitivity boost range.
6921    int32_t isp_sensitivity_range[2];
6922    isp_sensitivity_range[0] =
6923        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
6924    isp_sensitivity_range[1] =
6925        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
6926    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
6927                      isp_sensitivity_range,
6928                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
6929
// Fixed (capability-independent) mode lists for hot-pixel, shading,
// lens-shading-map, edge, noise-reduction, tonemap and hot-pixel-map tags.
6930    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6931                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6932    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6933            available_hot_pixel_modes,
6934            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6935
6936    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6937                                         ANDROID_SHADING_MODE_FAST,
6938                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
6939    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6940                      available_shading_modes,
6941                      3);
6942
6943    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6944                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6945    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6946                      available_lens_shading_map_modes,
6947                      2);
6948
6949    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6950                                      ANDROID_EDGE_MODE_FAST,
6951                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
6952                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6953    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6954            available_edge_modes,
6955            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6956
6957    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6958                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
6959                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6960                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6961                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6962    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6963            available_noise_red_modes,
6964            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6965
6966    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6967                                         ANDROID_TONEMAP_MODE_FAST,
6968                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6969    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6970            available_tonemap_modes,
6971            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6972
6973    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6974    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6975            available_hot_pixel_map_modes,
6976            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6977
// Reference illuminants are published only when the HAL value maps to a
// framework enum; 'val' here reuses the int declared earlier in this scope.
6978    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6979            gCamCapability[cameraId]->reference_illuminant1);
6980    if (NAME_NOT_FOUND != val) {
6981        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6982        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6983    }
6984
6985    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6986            gCamCapability[cameraId]->reference_illuminant2);
6987    if (NAME_NOT_FOUND != val) {
6988        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6989        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6990    }
6991
// Color/forward/calibration matrices: HAL storage is reinterpreted as the
// framework's rational type via a (void*) round-trip -- assumes matching
// memory layout between the HAL matrix element type and
// camera_metadata_rational_t (TODO confirm in cam_types headers).
6992    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6993            (void *)gCamCapability[cameraId]->forward_matrix1,
6994            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6995
6996    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6997            (void *)gCamCapability[cameraId]->forward_matrix2,
6998            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6999
7000    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7001            (void *)gCamCapability[cameraId]->color_transform1,
7002            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7003
7004    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7005            (void *)gCamCapability[cameraId]->color_transform2,
7006            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7007
7008    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7009            (void *)gCamCapability[cameraId]->calibration_transform1,
7010            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7011
7012    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7013            (void *)gCamCapability[cameraId]->calibration_transform2,
7014            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7015
// Baseline set of request keys supported in capture requests; AF_REGIONS is
// appended below only when the device has more than one focus mode.
7016    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7017       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7018       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7019       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7020       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7021       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7022       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7023       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7024       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7025       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7026       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7027       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7028       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7029       ANDROID_JPEG_GPS_COORDINATES,
7030       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7031       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7032       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7033       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7034       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7035       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7036       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7037       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7038       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7039       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7040       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7041       ANDROID_STATISTICS_FACE_DETECT_MODE,
7042       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7043       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7044       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7045       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};
7046
7047    size_t request_keys_cnt =
7048            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7049    Vector<int32_t> available_request_keys;
7050    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
// AF regions only make sense when more than one focus mode is available
// (a fixed-focus camera cannot honor them).
7051    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7052        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7053    }
7054
7055    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7056            available_request_keys.array(), available_request_keys.size());
7057
// Baseline set of result keys, extended conditionally below for AF regions,
// bayer-sensor-only keys, the supported face-detect level, and dynamic
// black/white level support.
// NOTE(review): ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST appears twice in
// this initializer (also ANDROID_STATISTICS_FACE_RECTANGLES/SCORES are
// duplicated by the supportedFaceDetectMode==1 branch) -- harmless but
// redundant; verify against upstream before deduplicating.
7058    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7059       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7060       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7061       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7062       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7063       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7064       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7065       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7066       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7067       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7068       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7069       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7070       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7071       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7072       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7073       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7074       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7075       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7076       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7077       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7078       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7079       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7080       ANDROID_STATISTICS_FACE_SCORES,
7081       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
7082    size_t result_keys_cnt =
7083            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7084
7085    Vector<int32_t> available_result_keys;
7086    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7087    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7088        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7089    }
// Noise profile / green split are bayer-only results.
7090    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7091        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7092        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7093    }
// supportedFaceDetectMode (computed earlier): 1 adds simple FD results,
// 2/3 add the full-mode extras (IDs and landmarks).
7094    if (supportedFaceDetectMode == 1) {
7095        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7096        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7097    } else if ((supportedFaceDetectMode == 2) ||
7098            (supportedFaceDetectMode == 3)) {
7099        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7100        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7101    }
7102    if (hasBlackRegions) {
7103        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
7104        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
7105    }
7106    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7107            available_result_keys.array(), available_result_keys.size());
7108
7109    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7110       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7111       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7112       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7113       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7114       ANDROID_SCALER_CROPPING_TYPE,
7115       ANDROID_SYNC_MAX_LATENCY,
7116       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7117       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7118       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7119       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7120       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7121       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7122       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7123       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7124       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7125       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7126       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7127       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7128       ANDROID_LENS_FACING,
7129       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7130       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7131       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7132       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7133       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7134       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7135       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7136       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7137       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7138       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7139       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7140       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7141       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7142       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7143       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7144       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7145       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7146       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7147       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7148       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7149       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7150       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7151       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7152       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7153       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7154       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7155       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7156       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7157       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7158       ANDROID_CONTROL_AVAILABLE_MODES,
7159       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7160       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7161       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7162       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7163       ANDROID_SHADING_AVAILABLE_MODES,
7164       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7165       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7166
7167    Vector<int32_t> available_characteristics_keys;
7168    available_characteristics_keys.appendArray(characteristics_keys_basic,
7169            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7170    if (hasBlackRegions) {
7171        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7172    }
7173    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7174                      available_characteristics_keys.array(),
7175                      available_characteristics_keys.size());
7176
7177    /*available stall durations depend on the hw + sw and will be different for different devices */
7178    /*have to add for raw after implementation*/
7179    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7180    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7181
7182    Vector<int64_t> available_stall_durations;
7183    for (uint32_t j = 0; j < stall_formats_count; j++) {
7184        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7185            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7186                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7187                available_stall_durations.add(stall_formats[j]);
7188                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7189                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7190                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7191          }
7192        } else {
7193            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7194                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7195                available_stall_durations.add(stall_formats[j]);
7196                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7197                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7198                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7199            }
7200        }
7201    }
7202    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7203                      available_stall_durations.array(),
7204                      available_stall_durations.size());
7205
7206    //QCAMERA3_OPAQUE_RAW
7207    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7208    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7209    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7210    case LEGACY_RAW:
7211        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7212            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7213        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7214            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7215        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7216            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7217        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7218        break;
7219    case MIPI_RAW:
7220        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7221            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7222        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7223            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7224        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7225            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7226        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7227        break;
7228    default:
7229        LOGE("unknown opaque_raw_format %d",
7230                gCamCapability[cameraId]->opaque_raw_fmt);
7231        break;
7232    }
7233    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7234
7235    Vector<int32_t> strides;
7236    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7237            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7238        cam_stream_buf_plane_info_t buf_planes;
7239        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7240        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7241        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7242            &gCamCapability[cameraId]->padding_info, &buf_planes);
7243        strides.add(buf_planes.plane_info.mp[0].stride);
7244    }
7245    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7246            strides.size());
7247
7248    Vector<int32_t> opaque_size;
7249    for (size_t j = 0; j < scalar_formats_count; j++) {
7250        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7251            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7252                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7253                cam_stream_buf_plane_info_t buf_planes;
7254
7255                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7256                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7257
7258                if (rc == 0) {
7259                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7260                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7261                    opaque_size.add(buf_planes.plane_info.frame_len);
7262                }else {
7263                    LOGE("raw frame calculation failed!");
7264                }
7265            }
7266        }
7267    }
7268
7269    if ((opaque_size.size() > 0) &&
7270            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7271        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7272    else
7273        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7274
7275    gStaticMetadata[cameraId] = staticInfo.release();
7276    return rc;
7277}
7278
7279/*===========================================================================
7280 * FUNCTION   : makeTable
7281 *
7282 * DESCRIPTION: make a table of sizes
7283 *
7284 * PARAMETERS :
7285 *
7286 *
7287 *==========================================================================*/
7288void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7289        size_t max_size, int32_t *sizeTable)
7290{
7291    size_t j = 0;
7292    if (size > max_size) {
7293       size = max_size;
7294    }
7295    for (size_t i = 0; i < size; i++) {
7296        sizeTable[j] = dimTable[i].width;
7297        sizeTable[j+1] = dimTable[i].height;
7298        j+=2;
7299    }
7300}
7301
7302/*===========================================================================
7303 * FUNCTION   : makeFPSTable
7304 *
7305 * DESCRIPTION: make a table of fps ranges
7306 *
7307 * PARAMETERS :
7308 *
7309 *==========================================================================*/
7310void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7311        size_t max_size, int32_t *fpsRangesTable)
7312{
7313    size_t j = 0;
7314    if (size > max_size) {
7315       size = max_size;
7316    }
7317    for (size_t i = 0; i < size; i++) {
7318        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7319        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7320        j+=2;
7321    }
7322}
7323
7324/*===========================================================================
7325 * FUNCTION   : makeOverridesList
7326 *
7327 * DESCRIPTION: make a list of scene mode overrides
7328 *
7329 * PARAMETERS :
7330 *
7331 *
7332 *==========================================================================*/
7333void QCamera3HardwareInterface::makeOverridesList(
7334        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
7335        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
7336{
7337    /*daemon will give a list of overrides for all scene modes.
7338      However we should send the fwk only the overrides for the scene modes
7339      supported by the framework*/
7340    size_t j = 0;
7341    if (size > max_size) {
7342       size = max_size;
7343    }
7344    size_t focus_count = CAM_FOCUS_MODE_MAX;
7345    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
7346            focus_count);
7347    for (size_t i = 0; i < size; i++) {
7348        bool supt = false;
7349        size_t index = supported_indexes[i];
7350        overridesList[j] = gCamCapability[camera_id]->flash_available ?
7351                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
7352        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7353                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7354                overridesTable[index].awb_mode);
7355        if (NAME_NOT_FOUND != val) {
7356            overridesList[j+1] = (uint8_t)val;
7357        }
7358        uint8_t focus_override = overridesTable[index].af_mode;
7359        for (size_t k = 0; k < focus_count; k++) {
7360           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
7361              supt = true;
7362              break;
7363           }
7364        }
7365        if (supt) {
7366            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7367                    focus_override);
7368            if (NAME_NOT_FOUND != val) {
7369                overridesList[j+2] = (uint8_t)val;
7370            }
7371        } else {
7372           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
7373        }
7374        j+=3;
7375    }
7376}
7377
7378/*===========================================================================
7379 * FUNCTION   : filterJpegSizes
7380 *
7381 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7382 *              could be downscaled to
7383 *
7384 * PARAMETERS :
7385 *
7386 * RETURN     : length of jpegSizes array
7387 *==========================================================================*/
7388
7389size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7390        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7391        uint8_t downscale_factor)
7392{
7393    if (0 == downscale_factor) {
7394        downscale_factor = 1;
7395    }
7396
7397    int32_t min_width = active_array_size.width / downscale_factor;
7398    int32_t min_height = active_array_size.height / downscale_factor;
7399    size_t jpegSizesCnt = 0;
7400    if (processedSizesCnt > maxCount) {
7401        processedSizesCnt = maxCount;
7402    }
7403    for (size_t i = 0; i < processedSizesCnt; i+=2) {
7404        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7405            jpegSizes[jpegSizesCnt] = processedSizes[i];
7406            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7407            jpegSizesCnt += 2;
7408        }
7409    }
7410    return jpegSizesCnt;
7411}
7412
7413/*===========================================================================
7414 * FUNCTION   : computeNoiseModelEntryS
7415 *
7416 * DESCRIPTION: function to map a given sensitivity to the S noise
7417 *              model parameters in the DNG noise model.
7418 *
7419 * PARAMETERS : sens : the sensor sensitivity
7420 *
7421 ** RETURN    : S (sensor amplification) noise
7422 *
7423 *==========================================================================*/
7424double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7425    double s = gCamCapability[mCameraId]->gradient_S * sens +
7426            gCamCapability[mCameraId]->offset_S;
7427    return ((s < 0.0) ? 0.0 : s);
7428}
7429
7430/*===========================================================================
7431 * FUNCTION   : computeNoiseModelEntryO
7432 *
7433 * DESCRIPTION: function to map a given sensitivity to the O noise
7434 *              model parameters in the DNG noise model.
7435 *
7436 * PARAMETERS : sens : the sensor sensitivity
7437 *
7438 ** RETURN    : O (sensor readout) noise
7439 *
7440 *==========================================================================*/
7441double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7442    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7443    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7444            1.0 : (1.0 * sens / max_analog_sens);
7445    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7446            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7447    return ((o < 0.0) ? 0.0 : o);
7448}
7449
7450/*===========================================================================
7451 * FUNCTION   : getSensorSensitivity
7452 *
7453 * DESCRIPTION: convert iso_mode to an integer value
7454 *
7455 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7456 *
7457 ** RETURN    : sensitivity supported by sensor
7458 *
7459 *==========================================================================*/
7460int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7461{
7462    int32_t sensitivity;
7463
7464    switch (iso_mode) {
7465    case CAM_ISO_MODE_100:
7466        sensitivity = 100;
7467        break;
7468    case CAM_ISO_MODE_200:
7469        sensitivity = 200;
7470        break;
7471    case CAM_ISO_MODE_400:
7472        sensitivity = 400;
7473        break;
7474    case CAM_ISO_MODE_800:
7475        sensitivity = 800;
7476        break;
7477    case CAM_ISO_MODE_1600:
7478        sensitivity = 1600;
7479        break;
7480    default:
7481        sensitivity = -1;
7482        break;
7483    }
7484    return sensitivity;
7485}
7486
7487/*===========================================================================
7488 * FUNCTION   : getCamInfo
7489 *
7490 * DESCRIPTION: query camera capabilities
7491 *
7492 * PARAMETERS :
7493 *   @cameraId  : camera Id
7494 *   @info      : camera info struct to be filled in with camera capabilities
7495 *
7496 * RETURN     : int type of status
7497 *              NO_ERROR  -- success
7498 *              none-zero failure code
7499 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock guards the lazily-initialized per-camera capability and
    // static-metadata caches below; it is released on every return path.
    pthread_mutex_lock(&gCamLock);
    // Lazily query capabilities on first access for this camera id.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Static metadata depends on the capabilities above, so it is
    // initialized second (also lazily, cached for later calls).
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate sensor mount position to the framework facing enum.
    // An unknown position is reported via rc, but the remaining info
    // fields are still populated before returning.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest max_fps across all supported fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput (streams * active-array pixels * fps)
    // relative to the CPP's maximum pixel bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
7567
7568/*===========================================================================
7569 * FUNCTION   : translateCapabilityToMetadata
7570 *
7571 * DESCRIPTION: translate the capability into camera_metadata_t
7572 *
7573 * PARAMETERS : type of the request
7574 *
7575 *
7576 * RETURN     : success: camera_metadata_t*
7577 *              failure: NULL
7578 *
7579 *==========================================================================*/
7580camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7581{
7582    if (mDefaultMetadata[type] != NULL) {
7583        return mDefaultMetadata[type];
7584    }
7585    //first time we are handling this request
7586    //fill up the metadata structure using the wrapper class
7587    CameraMetadata settings;
7588    //translate from cam_capability_t to camera_metadata_tag_t
7589    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7590    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7591    int32_t defaultRequestID = 0;
7592    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7593
7594    /* OIS disable */
7595    char ois_prop[PROPERTY_VALUE_MAX];
7596    memset(ois_prop, 0, sizeof(ois_prop));
7597    property_get("persist.camera.ois.disable", ois_prop, "0");
7598    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7599
7600    /* Force video to use OIS */
7601    char videoOisProp[PROPERTY_VALUE_MAX];
7602    memset(videoOisProp, 0, sizeof(videoOisProp));
7603    property_get("persist.camera.ois.video", videoOisProp, "1");
7604    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7605
7606    // EIS enable/disable
7607    char eis_prop[PROPERTY_VALUE_MAX];
7608    memset(eis_prop, 0, sizeof(eis_prop));
7609    property_get("persist.camera.eis.enable", eis_prop, "0");
7610    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7611
7612    // Hybrid AE enable/disable
7613    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
7614    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
7615    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
7616    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
7617
7618    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7619    // This is a bit hacky. EIS is enabled only when the above setprop
7620    // is set to non-zero value and on back camera (for 2015 Nexus).
7621    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7622    // configureStream is called before this function. In other words,
7623    // we cannot guarantee the app will call configureStream before
7624    // calling createDefaultRequest.
7625    const bool eisEnabled = facingBack && eis_prop_set;
7626
7627    uint8_t controlIntent = 0;
7628    uint8_t focusMode;
7629    uint8_t vsMode;
7630    uint8_t optStabMode;
7631    uint8_t cacMode;
7632    uint8_t edge_mode;
7633    uint8_t noise_red_mode;
7634    uint8_t tonemap_mode;
7635    bool highQualityModeEntryAvailable = FALSE;
7636    bool fastModeEntryAvailable = FALSE;
7637    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7638    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7639    switch (type) {
7640      case CAMERA3_TEMPLATE_PREVIEW:
7641        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7642        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7643        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7644        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7645        edge_mode = ANDROID_EDGE_MODE_FAST;
7646        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7647        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7648        break;
7649      case CAMERA3_TEMPLATE_STILL_CAPTURE:
7650        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7651        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7652        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7653        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7654        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7655        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7656        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7657        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
7658        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
7659            if (gCamCapability[mCameraId]->aberration_modes[i] ==
7660                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
7661                highQualityModeEntryAvailable = TRUE;
7662            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
7663                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
7664                fastModeEntryAvailable = TRUE;
7665            }
7666        }
7667        if (highQualityModeEntryAvailable) {
7668            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7669        } else if (fastModeEntryAvailable) {
7670            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7671        }
7672        break;
7673      case CAMERA3_TEMPLATE_VIDEO_RECORD:
7674        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7675        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7676        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7677        if (eisEnabled) {
7678            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7679        }
7680        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7681        edge_mode = ANDROID_EDGE_MODE_FAST;
7682        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7683        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7684        if (forceVideoOis)
7685            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7686        break;
7687      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7688        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7689        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7690        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7691        if (eisEnabled) {
7692            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7693        }
7694        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7695        edge_mode = ANDROID_EDGE_MODE_FAST;
7696        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7697        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7698        if (forceVideoOis)
7699            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7700        break;
7701      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7702        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7703        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7704        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7705        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7706        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7707        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7708        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7709        break;
7710      case CAMERA3_TEMPLATE_MANUAL:
7711        edge_mode = ANDROID_EDGE_MODE_FAST;
7712        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7713        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7714        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7715        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7716        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7717        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7718        break;
7719      default:
7720        edge_mode = ANDROID_EDGE_MODE_FAST;
7721        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7722        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7723        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7724        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7725        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7726        break;
7727    }
7728    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7729    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7730    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7731    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7732        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7733    }
7734    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7735
7736    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7737            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7738        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7739    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7740            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7741            || ois_disable)
7742        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7743    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7744
7745    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7746            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7747
7748    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7749    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7750
7751    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7752    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7753
7754    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7755    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7756
7757    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7758    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7759
7760    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7761    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7762
7763    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7764    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7765
7766    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7767    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7768
7769    /*flash*/
7770    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7771    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7772
7773    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7774    settings.update(ANDROID_FLASH_FIRING_POWER,
7775            &flashFiringLevel, 1);
7776
7777    /* lens */
7778    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7779    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7780
7781    if (gCamCapability[mCameraId]->filter_densities_count) {
7782        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7783        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7784                        gCamCapability[mCameraId]->filter_densities_count);
7785    }
7786
7787    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7788    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7789
7790    float default_focus_distance = 0;
7791    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7792
7793    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7794    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7795
7796    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7797    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7798
7799    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7800    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7801
7802    /* face detection (default to OFF) */
7803    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7804    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7805
7806    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7807    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7808
7809    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7810    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7811
7812    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7813    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7814
7815    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7816    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7817
7818    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7819    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7820
7821    /* Exposure time(Update the Min Exposure Time)*/
7822    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7823    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7824
7825    /* frame duration */
7826    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7827    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7828
7829    /* sensitivity */
7830    static const int32_t default_sensitivity = 100;
7831    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7832    static const int32_t default_isp_sensitivity =
7833            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
7834    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
7835
7836    /*edge mode*/
7837    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7838
7839    /*noise reduction mode*/
7840    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7841
7842    /*color correction mode*/
7843    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7844    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7845
7846    /*transform matrix mode*/
7847    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7848
7849    int32_t scaler_crop_region[4];
7850    scaler_crop_region[0] = 0;
7851    scaler_crop_region[1] = 0;
7852    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7853    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7854    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7855
7856    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7857    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7858
7859    /*focus distance*/
7860    float focus_distance = 0.0;
7861    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7862
7863    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7864    float max_range = 0.0;
7865    float max_fixed_fps = 0.0;
7866    int32_t fps_range[2] = {0, 0};
7867    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7868            i++) {
7869        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7870            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7871        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7872                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7873                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7874            if (range > max_range) {
7875                fps_range[0] =
7876                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7877                fps_range[1] =
7878                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7879                max_range = range;
7880            }
7881        } else {
7882            if (range < 0.01 && max_fixed_fps <
7883                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7884                fps_range[0] =
7885                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7886                fps_range[1] =
7887                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7888                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7889            }
7890        }
7891    }
7892    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7893
7894    /*precapture trigger*/
7895    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7896    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7897
7898    /*af trigger*/
7899    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7900    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7901
7902    /* ae & af regions */
7903    int32_t active_region[] = {
7904            gCamCapability[mCameraId]->active_array_size.left,
7905            gCamCapability[mCameraId]->active_array_size.top,
7906            gCamCapability[mCameraId]->active_array_size.left +
7907                    gCamCapability[mCameraId]->active_array_size.width,
7908            gCamCapability[mCameraId]->active_array_size.top +
7909                    gCamCapability[mCameraId]->active_array_size.height,
7910            0};
7911    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7912            sizeof(active_region) / sizeof(active_region[0]));
7913    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7914            sizeof(active_region) / sizeof(active_region[0]));
7915
7916    /* black level lock */
7917    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7918    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7919
7920    /* lens shading map mode */
7921    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7922    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7923        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7924    }
7925    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7926
7927    //special defaults for manual template
7928    if (type == CAMERA3_TEMPLATE_MANUAL) {
7929        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7930        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7931
7932        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7933        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7934
7935        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7936        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7937
7938        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7939        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7940
7941        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7942        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7943
7944        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7945        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7946    }
7947
7948
7949    /* TNR
7950     * We'll use this location to determine which modes TNR will be set.
7951     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7952     * This is not to be confused with linking on a per stream basis that decision
7953     * is still on per-session basis and will be handled as part of config stream
7954     */
7955    uint8_t tnr_enable = 0;
7956
7957    if (m_bTnrPreview || m_bTnrVideo) {
7958
7959        switch (type) {
7960            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7961            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7962                    tnr_enable = 1;
7963                    break;
7964
7965            default:
7966                    tnr_enable = 0;
7967                    break;
7968        }
7969
7970        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7971        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7972        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7973
7974        LOGD("TNR:%d with process plate %d for template:%d",
7975                             tnr_enable, tnr_process_type, type);
7976    }
7977
7978    /* CDS default */
7979    char prop[PROPERTY_VALUE_MAX];
7980    memset(prop, 0, sizeof(prop));
7981    property_get("persist.camera.CDS", prop, "Auto");
7982    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7983    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7984    if (CAM_CDS_MODE_MAX == cds_mode) {
7985        cds_mode = CAM_CDS_MODE_AUTO;
7986    }
7987
7988    /* Disabling CDS in templates which have TNR enabled*/
7989    if (tnr_enable)
7990        cds_mode = CAM_CDS_MODE_OFF;
7991
7992    int32_t mode = cds_mode;
7993    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7994
7995    /* hybrid ae */
7996    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
7997
7998    mDefaultMetadata[type] = settings.release();
7999
8000    return mDefaultMetadata[type];
8001}
8002
8003/*===========================================================================
8004 * FUNCTION   : setFrameParameters
8005 *
8006 * DESCRIPTION: set parameters per frame as requested in the metadata from
8007 *              framework
8008 *
8009 * PARAMETERS :
8010 *   @request   : request that needs to be serviced
8011 *   @streamID : Stream ID of all the requested streams
8012 *   @blob_request: Whether this request is a blob request or not
8013 *
8014 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
8016 *==========================================================================*/
8017int QCamera3HardwareInterface::setFrameParameters(
8018                    camera3_capture_request_t *request,
8019                    cam_stream_ID_t streamID,
8020                    int blob_request,
8021                    uint32_t snapshotStreamId)
8022{
8023    /*translate from camera_metadata_t type to parm_type_t*/
8024    int rc = 0;
8025    int32_t hal_version = CAM_HAL_V3;
8026
8027    clear_metadata_buffer(mParameters);
8028    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8029        LOGE("Failed to set hal version in the parameters");
8030        return BAD_VALUE;
8031    }
8032
8033    /*we need to update the frame number in the parameters*/
8034    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8035            request->frame_number)) {
8036        LOGE("Failed to set the frame number in the parameters");
8037        return BAD_VALUE;
8038    }
8039
8040    /* Update stream id of all the requested buffers */
8041    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8042        LOGE("Failed to set stream type mask in the parameters");
8043        return BAD_VALUE;
8044    }
8045
8046    if (mUpdateDebugLevel) {
8047        uint32_t dummyDebugLevel = 0;
8048        /* The value of dummyDebugLevel is irrelavent. On
8049         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8050        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8051                dummyDebugLevel)) {
8052            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8053            return BAD_VALUE;
8054        }
8055        mUpdateDebugLevel = false;
8056    }
8057
8058    if(request->settings != NULL){
8059        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8060        if (blob_request)
8061            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8062    }
8063
8064    return rc;
8065}
8066
8067/*===========================================================================
8068 * FUNCTION   : setReprocParameters
8069 *
8070 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8071 *              return it.
8072 *
8073 * PARAMETERS :
8074 *   @request   : request that needs to be serviced
8075 *
8076 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE or error code from metadata translation
8078 *==========================================================================*/
8079int32_t QCamera3HardwareInterface::setReprocParameters(
8080        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8081        uint32_t snapshotStreamId)
8082{
8083    /*translate from camera_metadata_t type to parm_type_t*/
8084    int rc = 0;
8085
8086    if (NULL == request->settings){
8087        LOGE("Reprocess settings cannot be NULL");
8088        return BAD_VALUE;
8089    }
8090
8091    if (NULL == reprocParam) {
8092        LOGE("Invalid reprocessing metadata buffer");
8093        return BAD_VALUE;
8094    }
8095    clear_metadata_buffer(reprocParam);
8096
8097    /*we need to update the frame number in the parameters*/
8098    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8099            request->frame_number)) {
8100        LOGE("Failed to set the frame number in the parameters");
8101        return BAD_VALUE;
8102    }
8103
8104    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8105    if (rc < 0) {
8106        LOGE("Failed to translate reproc request");
8107        return rc;
8108    }
8109
8110    CameraMetadata frame_settings;
8111    frame_settings = request->settings;
8112    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8113            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8114        int32_t *crop_count =
8115                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8116        int32_t *crop_data =
8117                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8118        int32_t *roi_map =
8119                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8120        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8121            cam_crop_data_t crop_meta;
8122            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8123            crop_meta.num_of_streams = 1;
8124            crop_meta.crop_info[0].crop.left   = crop_data[0];
8125            crop_meta.crop_info[0].crop.top    = crop_data[1];
8126            crop_meta.crop_info[0].crop.width  = crop_data[2];
8127            crop_meta.crop_info[0].crop.height = crop_data[3];
8128
8129            crop_meta.crop_info[0].roi_map.left =
8130                    roi_map[0];
8131            crop_meta.crop_info[0].roi_map.top =
8132                    roi_map[1];
8133            crop_meta.crop_info[0].roi_map.width =
8134                    roi_map[2];
8135            crop_meta.crop_info[0].roi_map.height =
8136                    roi_map[3];
8137
8138            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8139                rc = BAD_VALUE;
8140            }
8141            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8142                    request->input_buffer->stream,
8143                    crop_meta.crop_info[0].crop.left,
8144                    crop_meta.crop_info[0].crop.top,
8145                    crop_meta.crop_info[0].crop.width,
8146                    crop_meta.crop_info[0].crop.height);
8147            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8148                    request->input_buffer->stream,
8149                    crop_meta.crop_info[0].roi_map.left,
8150                    crop_meta.crop_info[0].roi_map.top,
8151                    crop_meta.crop_info[0].roi_map.width,
8152                    crop_meta.crop_info[0].roi_map.height);
8153            } else {
8154                LOGE("Invalid reprocess crop count %d!", *crop_count);
8155            }
8156    } else {
8157        LOGE("No crop data from matching output stream");
8158    }
8159
8160    /* These settings are not needed for regular requests so handle them specially for
8161       reprocess requests; information needed for EXIF tags */
8162    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8163        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8164                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8165        if (NAME_NOT_FOUND != val) {
8166            uint32_t flashMode = (uint32_t)val;
8167            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8168                rc = BAD_VALUE;
8169            }
8170        } else {
8171            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8172                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8173        }
8174    } else {
8175        LOGH("No flash mode in reprocess settings");
8176    }
8177
8178    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8179        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8180        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8181            rc = BAD_VALUE;
8182        }
8183    } else {
8184        LOGH("No flash state in reprocess settings");
8185    }
8186
8187    return rc;
8188}
8189
8190/*===========================================================================
8191 * FUNCTION   : saveRequestSettings
8192 *
8193 * DESCRIPTION: Add any settings that might have changed to the request settings
8194 *              and save the settings to be applied on the frame
8195 *
8196 * PARAMETERS :
8197 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8198 *   @request      : request with initial settings
8199 *
8200 * RETURN     :
8201 * camera_metadata_t* : pointer to the saved request settings
8202 *==========================================================================*/
8203camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8204        const CameraMetadata &jpegMetadata,
8205        camera3_capture_request_t *request)
8206{
8207    camera_metadata_t *resultMetadata;
8208    CameraMetadata camMetadata;
8209    camMetadata = request->settings;
8210
8211    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8212        int32_t thumbnail_size[2];
8213        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8214        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8215        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8216                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8217    }
8218
8219    resultMetadata = camMetadata.release();
8220    return resultMetadata;
8221}
8222
8223/*===========================================================================
8224 * FUNCTION   : setHalFpsRange
8225 *
8226 * DESCRIPTION: set FPS range parameter
8227 *
8228 *
8229 * PARAMETERS :
8230 *   @settings    : Metadata from framework
8231 *   @hal_metadata: Metadata buffer
8232 *
8233 *
8234 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
8236 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    // Translate ANDROID_CONTROL_AE_TARGET_FPS_RANGE into the HAL FPS range
    // and, in constrained high-speed mode, the HFR mode and batch size.
    // Side effects: updates mBatchSize, mHFRVideoFps and may set
    // mNeedSensorRestart when the HFR fps changes mid-session.
    // NOTE(review): assumes the caller verified that
    // ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists in `settings` — find() on a
    // missing tag would yield an empty entry here. Confirm at call sites.
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps starts out mirroring the AE target range; it is overridden
    // below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR session: pin both preview and video min to the max fps so the
        // sensor runs at a fixed high rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the fps (e.g. 120/240) to the corresponding CAM_HFR_MODE_*.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to keep preview at
                // PREVIEW_FPS_FOR_HFR, capped at the hardware maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
8330
8331/*===========================================================================
8332 * FUNCTION   : translateToHalMetadata
8333 *
8334 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8335 *
8336 *
8337 * PARAMETERS :
8338 *   @request  : request sent from framework
8339 *
8340 *
8341 * RETURN     : success: NO_ERROR
8342 *              failure:
8343 *==========================================================================*/
8344int QCamera3HardwareInterface::translateToHalMetadata
8345                                  (const camera3_capture_request_t *request,
8346                                   metadata_buffer_t *hal_metadata,
8347                                   uint32_t snapshotStreamId)
8348{
8349    int rc = 0;
8350    CameraMetadata frame_settings;
8351    frame_settings = request->settings;
8352
8353    /* Do not change the order of the following list unless you know what you are
8354     * doing.
8355     * The order is laid out in such a way that parameters in the front of the table
8356     * may be used to override the parameters later in the table. Examples are:
8357     * 1. META_MODE should precede AEC/AWB/AF MODE
8358     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
8359     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
8360     * 4. Any mode should precede it's corresponding settings
8361     */
8362    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8363        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8364        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8365            rc = BAD_VALUE;
8366        }
8367        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8368        if (rc != NO_ERROR) {
8369            LOGE("extractSceneMode failed");
8370        }
8371    }
8372
8373    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8374        uint8_t fwk_aeMode =
8375            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8376        uint8_t aeMode;
8377        int32_t redeye;
8378
8379        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8380            aeMode = CAM_AE_MODE_OFF;
8381        } else {
8382            aeMode = CAM_AE_MODE_ON;
8383        }
8384        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8385            redeye = 1;
8386        } else {
8387            redeye = 0;
8388        }
8389
8390        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8391                fwk_aeMode);
8392        if (NAME_NOT_FOUND != val) {
8393            int32_t flashMode = (int32_t)val;
8394            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8395        }
8396
8397        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8398        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8399            rc = BAD_VALUE;
8400        }
8401    }
8402
8403    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8404        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8405        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8406                fwk_whiteLevel);
8407        if (NAME_NOT_FOUND != val) {
8408            uint8_t whiteLevel = (uint8_t)val;
8409            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8410                rc = BAD_VALUE;
8411            }
        }
    }

    // NOTE(review): this is the tail of the framework->HAL metadata
    // translation routine. `frame_settings` wraps the incoming request
    // settings; each recognized ANDROID_* / QCAMERA3_* tag is copied into
    // the `hal_metadata` parameter batch. A failed
    // ADD_SET_PARAM_ENTRY_TO_BATCH downgrades rc to BAD_VALUE but
    // translation deliberately continues with the remaining tags.

    // Chromatic aberration correction: map the framework CAC mode to the
    // HAL enum, then clamp it to a mode this sensor actually advertises.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
        uint8_t fwk_cacMode =
                frame_settings.find(
                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                fwk_cacMode);
        if (NAME_NOT_FOUND != val) {
            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
            bool entryAvailable = FALSE;
            // Check whether Frameworks set CAC mode is supported in device or not
            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
                    entryAvailable = TRUE;
                    break;
                }
            }
            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
            // If entry not found then set the device supported mode instead of frameworks mode i.e,
            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
            if (entryAvailable == FALSE) {
                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                } else {
                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                        // High is not supported and so set the FAST as spec say's underlying
                        // device implementation can be the same for both modes.
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
                        // in order to avoid the fps drop due to high quality
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    } else {
                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
                    }
                }
            }
            LOGD("Final cacMode is %d", cacMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
        }
    }

    // Autofocus mode: translate via FOCUS_MODES_MAP; unmapped values are
    // silently dropped (no LOGE here, unlike the CAC branch above).
    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                fwk_focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t focusMode = (uint8_t)val;
            LOGD("set focus mode %d", focusMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Manual focus distance (diopters, passed through unclamped).
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }

    // AE antibanding (50Hz/60Hz/auto/off), mapped via ANTIBANDING_MODES_MAP.
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // AE exposure compensation, clamped into the advertised capability range.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    // AE lock flag (pass-through byte).
    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    // AE target FPS range is delegated to setHalFpsRange(); note this
    // overwrites rc directly rather than only downgrading it.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            LOGE("setHalFpsRange failed");
        }
    }

    // AWB lock flag (pass-through byte).
    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }

    // Color effect (mono/sepia/...), mapped via EFFECT_MODES_MAP.
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Color correction mode (pass-through byte).
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Per-channel color correction gains (CC_GAINS_COUNT floats copied
    // verbatim; assumes the framework entry holds at least that many).
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // Color correction transform: copy the framework's flat rational array
    // row-major into the HAL's CC_MATRIX_ROWS x CC_MATRIX_COLS matrix.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }

    // AE precapture trigger: only forwarded when BOTH the trigger and its
    // id are present in the request; defaults (IDLE / -1) are otherwise
    // left unsent.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        LOGD("AfTrigger: %d AfTriggerID: %d",
                af_trigger.trigger, af_trigger.trigger_id);
    }

    // Demosaic mode; note the u8 framework entry is widened into int32_t.
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }
    // Edge (sharpening) mode: sharpness strength is forced to 0 when the
    // mode is OFF, otherwise the capability default is used.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }

    // Flash mode: android.flash.mode is honored only when AE mode is OFF or
    // ON; any ON_AUTO_FLASH* AE mode owns the flash and the explicit flash
    // request is ignored.
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
                respectFlashMode = 0;
                LOGH("AE Mode controls flash, ignore android.flash.mode");
            }
        }
        if (respectFlashMode) {
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            LOGH("flash mode after mapping %d", val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t flashMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }

    // Flash firing power (pass-through byte).
    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    // Flash firing time (pass-through int64).
    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    // Hot pixel correction mode (pass-through byte).
    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    // Lens aperture (f-number, pass-through float).
    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    // Neutral-density filter density (pass-through float).
    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    // Lens focal length (pass-through float).
    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
                focalLength)) {
            rc = BAD_VALUE;
        }
    }

    // Optical image stabilization mode (pass-through byte).
    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
                optStabMode)) {
            rc = BAD_VALUE;
        }
    }

    // Electronic (video) stabilization mode.
    // NOTE(review): this writes into mParameters while every surrounding
    // tag writes into hal_metadata — confirm the asymmetry is intentional
    // (mParameters appears to be the session-wide batch).
    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
        uint8_t videoStabMode =
                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        LOGD("videoStabMode from APP = %d", videoStabMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
                videoStabMode)) {
            rc = BAD_VALUE;
        }
    }


    // Noise reduction mode (pass-through byte).
    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    // Reprocess effective exposure factor (pass-through float).
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }

    // Scaler crop region: translated from active-array to sensor-output
    // coordinates. scalerCropSet is remembered so the AE/AF region blocks
    // below can be validated against the crop.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }

    // Manual sensor exposure time (ns, pass-through int64).
    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        LOGD("setting sensorExpTime %lld", sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Frame duration: raised to the per-request minimum and capped at the
    // sensor's max_frame_duration.
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        int64_t minFrameDuration = getMinFrameDuration(request);
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor sensitivity (ISO), clamped to the advertised range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }

    // Post-RAW (ISP digital) sensitivity boost, clamped to the ISP range.
    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
        int32_t ispSensitivity =
            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
        if (ispSensitivity <
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ispSensitivity >
            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
                ispSensitivity =
                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
                LOGD("clamp ispSensitivity to %d", ispSensitivity);
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
                ispSensitivity)) {
            rc = BAD_VALUE;
        }
    }

    // Lens shading correction mode (pass-through byte).
    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    // Face detection mode, mapped via FACEDETECT_MODES_MAP; unmapped values
    // are silently dropped.
    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];

        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);

        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Statistics histogram mode (pass-through byte).
    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    // Sharpness map mode (pass-through byte).
    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // Tonemap mode (pass-through byte).
    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    // All three channel curves must be present; the point count is derived
    // from the green curve (framework data is (in,out) float pairs, hence
    // count/2) and capped at CAM_MAX_TONEMAP_CURVE_SIZE.
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }

    // Capture intent (preview/still/video/..., pass-through byte).
    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    // Black level lock (pass-through byte).
    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    // Lens shading map output mode (pass-through byte).
    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE metering region: converted from the framework array, mapped into
    // sensor coordinates, and (when a crop region was set above) reset via
    // resetIfNeededROI; the ROI is only batched when `reset` stays true.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF metering region: same handling as the AE region above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
                roi.rect.height);

        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // CDS for non-HFR non-video mode
    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
            LOGE("Invalid CDS mode %d!", *fwk_cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
                rc = BAD_VALUE;
            }
        }
    }

    // TNR
    // Temporal noise reduction: requires both the enable flag and the
    // process-plate type vendor tags.
    // NOTE(review): b_TnrRequested is assigned but never read afterwards in
    // this chunk; also this batches into mParameters, not hal_metadata —
    // confirm both are intentional.
    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
        uint8_t b_TnrRequested = 0;
        cam_denoise_param_t tnr;
        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
        tnr.process_plates =
            (cam_denoise_process_type_t)frame_settings.find(
            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
        b_TnrRequested = tnr.denoise_enable;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
            rc = BAD_VALUE;
        }
    }

    // Sensor test pattern: for SOLID_COLOR the four per-channel values are
    // reordered according to the sensor's Bayer color arrangement so that
    // gr/gb land on the correct physical channels.
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        LOGE("color arrangement %d is not supported",
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            LOGE("Invalid framework sensor test pattern mode %d",
                    fwk_testPatternMode);
        }
    }

    // JPEG EXIF GPS coordinates (double array, copied via the array macro;
    // a short copy is reported as BAD_VALUE).
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        size_t count = 0;
        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
                gps_coords.data.d, gps_coords.count, count);
        if (gps_coords.count != count) {
            rc = BAD_VALUE;
        }
    }

    // JPEG EXIF GPS processing method: copied into a fixed, NUL-padded
    // buffer (strlcpy truncates overlong input).
    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
        size_t count = 0;
        const char *gps_methods_src = (const char *)
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
        memset(gps_methods, '\0', sizeof(gps_methods));
        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
        if (GPS_PROCESSING_METHOD_SIZE != count) {
            rc = BAD_VALUE;
        }
    }

    // JPEG EXIF GPS timestamp (pass-through int64).
    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
                gps_timestamp)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG orientation: batched both as the raw EXIF orientation and as a
    // rotation directive tied to the snapshot stream.
    // NOTE(review): rotation_info.rotation is left UNINITIALIZED when the
    // requested orientation is not exactly 0/90/180/270 — consider a
    // default (ROTATE_0) before the if-chain; confirm upstream behavior.
    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
        cam_rotation_info_t rotation_info;
        if (orientation == 0) {
           rotation_info.rotation = ROTATE_0;
        } else if (orientation == 90) {
           rotation_info.rotation = ROTATE_90;
        } else if (orientation == 180) {
           rotation_info.rotation = ROTATE_180;
        } else if (orientation == 270) {
           rotation_info.rotation = ROTATE_270;
        }
        rotation_info.streamId = snapshotStreamId;
        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG quality (u8 widened to uint32_t).
    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG thumbnail quality (u8 widened to uint32_t).
    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
        uint32_t thumb_quality = (uint32_t)
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
                thumb_quality)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG thumbnail size (width, height).
    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        cam_dimension_t dim;
        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
            rc = BAD_VALUE;
        }
    }

    // Internal metadata
    // Reprocess private data blob, copied as an int32 array.
    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
        size_t count = 0;
        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
                privatedata.data.i32, privatedata.count, count);
        if (privatedata.count != count) {
            rc = BAD_VALUE;
        }
    }

    // AV timer enable flag (vendor tag, pass-through byte).
    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
        uint8_t* use_av_timer =
                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
            rc = BAD_VALUE;
        }
    }

    // EV step
    // Always sent, independent of the request contents: the sensor's
    // exposure-compensation step from the static capability table.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
            gCamCapability[mCameraId]->exp_compensation_step)) {
        rc = BAD_VALUE;
    }

    // CDS info
    // Raw cam_cds_data_t blob reinterpreted from the vendor-tag byte array.
    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
        cam_cds_data_t *cdsData = (cam_cds_data_t *)
                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_META_CDS_DATA, *cdsData)) {
            rc = BAD_VALUE;
        }
    }

    // Hybrid AE
    // Experimental hybrid-AE enable flag (Nexus 2016 vendor tag).
    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
        uint8_t *hybrid_ae = (uint8_t *)
                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
            rc = BAD_VALUE;
        }
    }

    // NO_ERROR unless any batch insert failed (BAD_VALUE) or
    // setHalFpsRange reported an error above.
    return rc;
9176}
9177
9178/*===========================================================================
9179 * FUNCTION   : captureResultCb
9180 *
9181 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9182 *
9183 * PARAMETERS :
9184 *   @frame  : frame information from mm-camera-interface
9185 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9186 *   @userdata: userdata
9187 *
9188 * RETURN     : NONE
9189 *==========================================================================*/
9190void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9191                camera3_stream_buffer_t *buffer,
9192                uint32_t frame_number, bool isInputBuffer, void *userdata)
9193{
9194    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9195    if (hw == NULL) {
9196        LOGE("Invalid hw %p", hw);
9197        return;
9198    }
9199
9200    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9201    return;
9202}
9203
9204
9205/*===========================================================================
9206 * FUNCTION   : initialize
9207 *
9208 * DESCRIPTION: Pass framework callback pointers to HAL
9209 *
9210 * PARAMETERS :
9211 *
9212 *
9213 * RETURN     : Success : 0
9214 *              Failure: -ENODEV
9215 *==========================================================================*/
9216
9217int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9218                                  const camera3_callback_ops_t *callback_ops)
9219{
9220    LOGD("E");
9221    QCamera3HardwareInterface *hw =
9222        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9223    if (!hw) {
9224        LOGE("NULL camera device");
9225        return -ENODEV;
9226    }
9227
9228    int rc = hw->initialize(callback_ops);
9229    LOGD("X");
9230    return rc;
9231}
9232
9233/*===========================================================================
9234 * FUNCTION   : configure_streams
9235 *
9236 * DESCRIPTION:
9237 *
9238 * PARAMETERS :
9239 *
9240 *
9241 * RETURN     : Success: 0
9242 *              Failure: -EINVAL (if stream configuration is invalid)
9243 *                       -ENODEV (fatal error)
9244 *==========================================================================*/
9245
9246int QCamera3HardwareInterface::configure_streams(
9247        const struct camera3_device *device,
9248        camera3_stream_configuration_t *stream_list)
9249{
9250    LOGD("E");
9251    QCamera3HardwareInterface *hw =
9252        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9253    if (!hw) {
9254        LOGE("NULL camera device");
9255        return -ENODEV;
9256    }
9257    int rc = hw->configureStreams(stream_list);
9258    LOGD("X");
9259    return rc;
9260}
9261
9262/*===========================================================================
9263 * FUNCTION   : construct_default_request_settings
9264 *
9265 * DESCRIPTION: Configure a settings buffer to meet the required use case
9266 *
9267 * PARAMETERS :
9268 *
9269 *
9270 * RETURN     : Success: Return valid metadata
9271 *              Failure: Return NULL
9272 *==========================================================================*/
9273const camera_metadata_t* QCamera3HardwareInterface::
9274    construct_default_request_settings(const struct camera3_device *device,
9275                                        int type)
9276{
9277
9278    LOGD("E");
9279    camera_metadata_t* fwk_metadata = NULL;
9280    QCamera3HardwareInterface *hw =
9281        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9282    if (!hw) {
9283        LOGE("NULL camera device");
9284        return NULL;
9285    }
9286
9287    fwk_metadata = hw->translateCapabilityToMetadata(type);
9288
9289    LOGD("X");
9290    return fwk_metadata;
9291}
9292
9293/*===========================================================================
9294 * FUNCTION   : process_capture_request
9295 *
9296 * DESCRIPTION:
9297 *
9298 * PARAMETERS :
9299 *
9300 *
9301 * RETURN     :
9302 *==========================================================================*/
9303int QCamera3HardwareInterface::process_capture_request(
9304                    const struct camera3_device *device,
9305                    camera3_capture_request_t *request)
9306{
9307    LOGD("E");
9308    QCamera3HardwareInterface *hw =
9309        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9310    if (!hw) {
9311        LOGE("NULL camera device");
9312        return -EINVAL;
9313    }
9314
9315    int rc = hw->processCaptureRequest(request);
9316    LOGD("X");
9317    return rc;
9318}
9319
9320/*===========================================================================
9321 * FUNCTION   : dump
9322 *
9323 * DESCRIPTION:
9324 *
9325 * PARAMETERS :
9326 *
9327 *
9328 * RETURN     :
9329 *==========================================================================*/
9330
9331void QCamera3HardwareInterface::dump(
9332                const struct camera3_device *device, int fd)
9333{
9334    /* Log level property is read when "adb shell dumpsys media.camera" is
9335       called so that the log level can be controlled without restarting
9336       the media server */
9337    getLogLevel();
9338
9339    LOGD("E");
9340    QCamera3HardwareInterface *hw =
9341        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9342    if (!hw) {
9343        LOGE("NULL camera device");
9344        return;
9345    }
9346
9347    hw->dump(fd);
9348    LOGD("X");
9349    return;
9350}
9351
9352/*===========================================================================
9353 * FUNCTION   : flush
9354 *
9355 * DESCRIPTION:
9356 *
9357 * PARAMETERS :
9358 *
9359 *
9360 * RETURN     :
9361 *==========================================================================*/
9362
9363int QCamera3HardwareInterface::flush(
9364                const struct camera3_device *device)
9365{
9366    int rc;
9367    LOGD("E");
9368    QCamera3HardwareInterface *hw =
9369        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9370    if (!hw) {
9371        LOGE("NULL camera device");
9372        return -EINVAL;
9373    }
9374
9375    pthread_mutex_lock(&hw->mMutex);
9376    // Validate current state
9377    switch (hw->mState) {
9378        case STARTED:
9379            /* valid state */
9380            break;
9381
9382        case ERROR:
9383            pthread_mutex_unlock(&hw->mMutex);
9384            hw->handleCameraDeviceError();
9385            return -ENODEV;
9386
9387        default:
9388            LOGI("Flush returned during state %d", hw->mState);
9389            pthread_mutex_unlock(&hw->mMutex);
9390            return 0;
9391    }
9392    pthread_mutex_unlock(&hw->mMutex);
9393
9394    rc = hw->flush(true /* restart channels */ );
9395    LOGD("X");
9396    return rc;
9397}
9398
9399/*===========================================================================
9400 * FUNCTION   : close_camera_device
9401 *
9402 * DESCRIPTION:
9403 *
9404 * PARAMETERS :
9405 *
9406 *
9407 * RETURN     :
9408 *==========================================================================*/
9409int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9410{
9411    int ret = NO_ERROR;
9412    QCamera3HardwareInterface *hw =
9413        reinterpret_cast<QCamera3HardwareInterface *>(
9414            reinterpret_cast<camera3_device_t *>(device)->priv);
9415    if (!hw) {
9416        LOGE("NULL camera device");
9417        return BAD_VALUE;
9418    }
9419
9420    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
9421    delete hw;
9422    LOGI("[KPI Perf]: X");
9423    return ret;
9424}
9425
9426/*===========================================================================
9427 * FUNCTION   : getWaveletDenoiseProcessPlate
9428 *
9429 * DESCRIPTION: query wavelet denoise process plate
9430 *
9431 * PARAMETERS : None
9432 *
9433 * RETURN     : WNR prcocess plate value
9434 *==========================================================================*/
9435cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9436{
9437    char prop[PROPERTY_VALUE_MAX];
9438    memset(prop, 0, sizeof(prop));
9439    property_get("persist.denoise.process.plates", prop, "0");
9440    int processPlate = atoi(prop);
9441    switch(processPlate) {
9442    case 0:
9443        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9444    case 1:
9445        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9446    case 2:
9447        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9448    case 3:
9449        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9450    default:
9451        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9452    }
9453}
9454
9455
9456/*===========================================================================
9457 * FUNCTION   : getTemporalDenoiseProcessPlate
9458 *
9459 * DESCRIPTION: query temporal denoise process plate
9460 *
9461 * PARAMETERS : None
9462 *
9463 * RETURN     : TNR prcocess plate value
9464 *==========================================================================*/
9465cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9466{
9467    char prop[PROPERTY_VALUE_MAX];
9468    memset(prop, 0, sizeof(prop));
9469    property_get("persist.tnr.process.plates", prop, "0");
9470    int processPlate = atoi(prop);
9471    switch(processPlate) {
9472    case 0:
9473        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9474    case 1:
9475        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9476    case 2:
9477        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9478    case 3:
9479        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9480    default:
9481        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9482    }
9483}
9484
9485
9486/*===========================================================================
9487 * FUNCTION   : extractSceneMode
9488 *
9489 * DESCRIPTION: Extract scene mode from frameworks set metadata
9490 *
9491 * PARAMETERS :
9492 *      @frame_settings: CameraMetadata reference
9493 *      @metaMode: ANDROID_CONTORL_MODE
9494 *      @hal_metadata: hal metadata structure
9495 *
9496 * RETURN     : None
9497 *==========================================================================*/
9498int32_t QCamera3HardwareInterface::extractSceneMode(
9499        const CameraMetadata &frame_settings, uint8_t metaMode,
9500        metadata_buffer_t *hal_metadata)
9501{
9502    int32_t rc = NO_ERROR;
9503
9504    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9505        camera_metadata_ro_entry entry =
9506                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9507        if (0 == entry.count)
9508            return rc;
9509
9510        uint8_t fwk_sceneMode = entry.data.u8[0];
9511
9512        int val = lookupHalName(SCENE_MODES_MAP,
9513                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9514                fwk_sceneMode);
9515        if (NAME_NOT_FOUND != val) {
9516            uint8_t sceneMode = (uint8_t)val;
9517            LOGD("sceneMode: %d", sceneMode);
9518            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9519                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9520                rc = BAD_VALUE;
9521            }
9522        }
9523    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9524            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9525        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9526        LOGD("sceneMode: %d", sceneMode);
9527        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9528                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9529            rc = BAD_VALUE;
9530        }
9531    }
9532    return rc;
9533}
9534
9535/*===========================================================================
9536 * FUNCTION   : needRotationReprocess
9537 *
9538 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9539 *
9540 * PARAMETERS : none
9541 *
9542 * RETURN     : true: needed
9543 *              false: no need
9544 *==========================================================================*/
9545bool QCamera3HardwareInterface::needRotationReprocess()
9546{
9547    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9548        // current rotation is not zero, and pp has the capability to process rotation
9549        LOGH("need do reprocess for rotation");
9550        return true;
9551    }
9552
9553    return false;
9554}
9555
9556/*===========================================================================
9557 * FUNCTION   : needReprocess
9558 *
9559 * DESCRIPTION: if reprocess in needed
9560 *
9561 * PARAMETERS : none
9562 *
9563 * RETURN     : true: needed
9564 *              false: no need
9565 *==========================================================================*/
9566bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9567{
9568    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
9569        // TODO: add for ZSL HDR later
9570        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9571        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9572            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
9573            return true;
9574        } else {
9575            LOGH("already post processed frame");
9576            return false;
9577        }
9578    }
9579    return needRotationReprocess();
9580}
9581
9582/*===========================================================================
9583 * FUNCTION   : needJpegExifRotation
9584 *
9585 * DESCRIPTION: if rotation from jpeg is needed
9586 *
9587 * PARAMETERS : none
9588 *
9589 * RETURN     : true: needed
9590 *              false: no need
9591 *==========================================================================*/
9592bool QCamera3HardwareInterface::needJpegExifRotation()
9593{
9594   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9595    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9596       LOGD("Need use Jpeg EXIF Rotation");
9597       return true;
9598    }
9599    return false;
9600}
9601
9602/*===========================================================================
9603 * FUNCTION   : addOfflineReprocChannel
9604 *
9605 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9606 *              coming from input channel
9607 *
9608 * PARAMETERS :
9609 *   @config  : reprocess configuration
9610 *   @inputChHandle : pointer to the input (source) channel
9611 *
9612 *
9613 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
9614 *==========================================================================*/
9615QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9616        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9617{
9618    int32_t rc = NO_ERROR;
9619    QCamera3ReprocessChannel *pChannel = NULL;
9620
9621    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9622            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9623            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9624    if (NULL == pChannel) {
9625        LOGE("no mem for reprocess channel");
9626        return NULL;
9627    }
9628
9629    rc = pChannel->initialize(IS_TYPE_NONE);
9630    if (rc != NO_ERROR) {
9631        LOGE("init reprocess channel failed, ret = %d", rc);
9632        delete pChannel;
9633        return NULL;
9634    }
9635
9636    // pp feature config
9637    cam_pp_feature_config_t pp_config;
9638    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9639
9640    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9641    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
9642            & CAM_QCOM_FEATURE_DSDN) {
9643        //Use CPP CDS incase h/w supports it.
9644        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
9645        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
9646    }
9647    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9648        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
9649    }
9650
9651    rc = pChannel->addReprocStreamsFromSource(pp_config,
9652            config,
9653            IS_TYPE_NONE,
9654            mMetadataChannel);
9655
9656    if (rc != NO_ERROR) {
9657        delete pChannel;
9658        return NULL;
9659    }
9660    return pChannel;
9661}
9662
9663/*===========================================================================
9664 * FUNCTION   : getMobicatMask
9665 *
9666 * DESCRIPTION: returns mobicat mask
9667 *
9668 * PARAMETERS : none
9669 *
9670 * RETURN     : mobicat mask
9671 *
9672 *==========================================================================*/
9673uint8_t QCamera3HardwareInterface::getMobicatMask()
9674{
9675    return m_MobicatMask;
9676}
9677
9678/*===========================================================================
9679 * FUNCTION   : setMobicat
9680 *
9681 * DESCRIPTION: set Mobicat on/off.
9682 *
9683 * PARAMETERS :
9684 *   @params  : none
9685 *
9686 * RETURN     : int32_t type of status
9687 *              NO_ERROR  -- success
9688 *              none-zero failure code
9689 *==========================================================================*/
9690int32_t QCamera3HardwareInterface::setMobicat()
9691{
9692    char value [PROPERTY_VALUE_MAX];
9693    property_get("persist.camera.mobicat", value, "0");
9694    int32_t ret = NO_ERROR;
9695    uint8_t enableMobi = (uint8_t)atoi(value);
9696
9697    if (enableMobi) {
9698        tune_cmd_t tune_cmd;
9699        tune_cmd.type = SET_RELOAD_CHROMATIX;
9700        tune_cmd.module = MODULE_ALL;
9701        tune_cmd.value = TRUE;
9702        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9703                CAM_INTF_PARM_SET_VFE_COMMAND,
9704                tune_cmd);
9705
9706        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9707                CAM_INTF_PARM_SET_PP_COMMAND,
9708                tune_cmd);
9709    }
9710    m_MobicatMask = enableMobi;
9711
9712    return ret;
9713}
9714
9715/*===========================================================================
9716* FUNCTION   : getLogLevel
9717*
9718* DESCRIPTION: Reads the log level property into a variable
9719*
9720* PARAMETERS :
9721*   None
9722*
9723* RETURN     :
9724*   None
9725*==========================================================================*/
9726void QCamera3HardwareInterface::getLogLevel()
9727{
9728    char prop[PROPERTY_VALUE_MAX];
9729    uint32_t globalLogLevel = 0;
9730
9731    property_get("persist.camera.hal.debug", prop, "0");
9732    int val = atoi(prop);
9733    if (0 <= val) {
9734        gCamHal3LogLevel = (uint32_t)val;
9735    }
9736
9737    property_get("persist.camera.kpi.debug", prop, "1");
9738    gKpiDebugLevel = atoi(prop);
9739
9740    property_get("persist.camera.global.debug", prop, "0");
9741    val = atoi(prop);
9742    if (0 <= val) {
9743        globalLogLevel = (uint32_t)val;
9744    }
9745
9746    /* Highest log level among hal.logs and global.logs is selected */
9747    if (gCamHal3LogLevel < globalLogLevel)
9748        gCamHal3LogLevel = globalLogLevel;
9749
9750    return;
9751}
9752
9753/*===========================================================================
9754 * FUNCTION   : validateStreamRotations
9755 *
9756 * DESCRIPTION: Check if the rotations requested are supported
9757 *
9758 * PARAMETERS :
9759 *   @stream_list : streams to be configured
9760 *
9761 * RETURN     : NO_ERROR on success
9762 *              -EINVAL on failure
9763 *
9764 *==========================================================================*/
9765int QCamera3HardwareInterface::validateStreamRotations(
9766        camera3_stream_configuration_t *streamList)
9767{
9768    int rc = NO_ERROR;
9769
9770    /*
9771    * Loop through all streams requested in configuration
9772    * Check if unsupported rotations have been requested on any of them
9773    */
9774    for (size_t j = 0; j < streamList->num_streams; j++){
9775        camera3_stream_t *newStream = streamList->streams[j];
9776
9777        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9778        bool isImplDef = (newStream->format ==
9779                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9780        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9781                isImplDef);
9782
9783        if (isRotated && (!isImplDef || isZsl)) {
9784            LOGE("Error: Unsupported rotation of %d requested for stream"
9785                    "type:%d and stream format:%d",
9786                    newStream->rotation, newStream->stream_type,
9787                    newStream->format);
9788            rc = -EINVAL;
9789            break;
9790        }
9791    }
9792
9793    return rc;
9794}
9795
9796/*===========================================================================
9797* FUNCTION   : getFlashInfo
9798*
9799* DESCRIPTION: Retrieve information about whether the device has a flash.
9800*
9801* PARAMETERS :
9802*   @cameraId  : Camera id to query
9803*   @hasFlash  : Boolean indicating whether there is a flash device
9804*                associated with given camera
9805*   @flashNode : If a flash device exists, this will be its device node.
9806*
9807* RETURN     :
9808*   None
9809*==========================================================================*/
9810void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9811        bool& hasFlash,
9812        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9813{
9814    cam_capability_t* camCapability = gCamCapability[cameraId];
9815    if (NULL == camCapability) {
9816        hasFlash = false;
9817        flashNode[0] = '\0';
9818    } else {
9819        hasFlash = camCapability->flash_available;
9820        strlcpy(flashNode,
9821                (char*)camCapability->flash_dev_name,
9822                QCAMERA_MAX_FILEPATH_LENGTH);
9823    }
9824}
9825
9826/*===========================================================================
9827* FUNCTION   : getEepromVersionInfo
9828*
9829* DESCRIPTION: Retrieve version info of the sensor EEPROM data
9830*
9831* PARAMETERS : None
9832*
9833* RETURN     : string describing EEPROM version
9834*              "\0" if no such info available
9835*==========================================================================*/
9836const char *QCamera3HardwareInterface::getEepromVersionInfo()
9837{
9838    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
9839}
9840
9841/*===========================================================================
9842* FUNCTION   : getLdafCalib
9843*
9844* DESCRIPTION: Retrieve Laser AF calibration data
9845*
9846* PARAMETERS : None
9847*
9848* RETURN     : Two uint32_t describing laser AF calibration data
9849*              NULL if none is available.
9850*==========================================================================*/
9851const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9852{
9853    if (mLdafCalibExist) {
9854        return &mLdafCalib[0];
9855    } else {
9856        return NULL;
9857    }
9858}
9859
9860/*===========================================================================
9861 * FUNCTION   : dynamicUpdateMetaStreamInfo
9862 *
9863 * DESCRIPTION: This function:
9864 *             (1) stops all the channels
9865 *             (2) returns error on pending requests and buffers
9866 *             (3) sends metastream_info in setparams
9867 *             (4) starts all channels
9868 *             This is useful when sensor has to be restarted to apply any
9869 *             settings such as frame rate from a different sensor mode
9870 *
9871 * PARAMETERS : None
9872 *
9873 * RETURN     : NO_ERROR on success
9874 *              Error codes on failure
9875 *
9876 *==========================================================================*/
9877int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
9878{
9879    ATRACE_CALL();
9880    int rc = NO_ERROR;
9881
9882    LOGD("E");
9883
9884    rc = stopAllChannels();
9885    if (rc < 0) {
9886        LOGE("stopAllChannels failed");
9887        return rc;
9888    }
9889
9890    rc = notifyErrorForPendingRequests();
9891    if (rc < 0) {
9892        LOGE("notifyErrorForPendingRequests failed");
9893        return rc;
9894    }
9895
9896    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
9897        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
9898                "Format:%d",
9899                mStreamConfigInfo.type[i],
9900                mStreamConfigInfo.stream_sizes[i].width,
9901                mStreamConfigInfo.stream_sizes[i].height,
9902                mStreamConfigInfo.postprocess_mask[i],
9903                mStreamConfigInfo.format[i]);
9904    }
9905
9906    /* Send meta stream info once again so that ISP can start */
9907    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9908            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
9909    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
9910            mParameters);
9911    if (rc < 0) {
9912        LOGE("set Metastreaminfo failed. Sensor mode does not change");
9913    }
9914
9915    rc = startAllChannels();
9916    if (rc < 0) {
9917        LOGE("startAllChannels failed");
9918        return rc;
9919    }
9920
9921    LOGD("X");
9922    return rc;
9923}
9924
9925/*===========================================================================
9926 * FUNCTION   : stopAllChannels
9927 *
9928 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9929 *
9930 * PARAMETERS : None
9931 *
9932 * RETURN     : NO_ERROR on success
9933 *              Error codes on failure
9934 *
9935 *==========================================================================*/
9936int32_t QCamera3HardwareInterface::stopAllChannels()
9937{
9938    int32_t rc = NO_ERROR;
9939
9940    LOGD("Stopping all channels");
9941    // Stop the Streams/Channels
9942    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9943        it != mStreamInfo.end(); it++) {
9944        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9945        if (channel) {
9946            channel->stop();
9947        }
9948        (*it)->status = INVALID;
9949    }
9950
9951    if (mSupportChannel) {
9952        mSupportChannel->stop();
9953    }
9954    if (mAnalysisChannel) {
9955        mAnalysisChannel->stop();
9956    }
9957    if (mRawDumpChannel) {
9958        mRawDumpChannel->stop();
9959    }
9960    if (mMetadataChannel) {
9961        /* If content of mStreamInfo is not 0, there is metadata stream */
9962        mMetadataChannel->stop();
9963    }
9964
9965    LOGD("All channels stopped");
9966    return rc;
9967}
9968
9969/*===========================================================================
9970 * FUNCTION   : startAllChannels
9971 *
9972 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9973 *
9974 * PARAMETERS : None
9975 *
9976 * RETURN     : NO_ERROR on success
9977 *              Error codes on failure
9978 *
9979 *==========================================================================*/
9980int32_t QCamera3HardwareInterface::startAllChannels()
9981{
9982    int32_t rc = NO_ERROR;
9983
9984    LOGD("Start all channels ");
9985    // Start the Streams/Channels
9986    if (mMetadataChannel) {
9987        /* If content of mStreamInfo is not 0, there is metadata stream */
9988        rc = mMetadataChannel->start();
9989        if (rc < 0) {
9990            LOGE("META channel start failed");
9991            return rc;
9992        }
9993    }
9994    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9995        it != mStreamInfo.end(); it++) {
9996        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9997        if (channel) {
9998            rc = channel->start();
9999            if (rc < 0) {
10000                LOGE("channel start failed");
10001                return rc;
10002            }
10003        }
10004    }
10005    if (mAnalysisChannel) {
10006        mAnalysisChannel->start();
10007    }
10008    if (mSupportChannel) {
10009        rc = mSupportChannel->start();
10010        if (rc < 0) {
10011            LOGE("Support channel start failed");
10012            return rc;
10013        }
10014    }
10015    if (mRawDumpChannel) {
10016        rc = mRawDumpChannel->start();
10017        if (rc < 0) {
10018            LOGE("RAW dump channel start failed");
10019            return rc;
10020        }
10021    }
10022
10023    LOGD("All channels started");
10024    return rc;
10025}
10026
10027/*===========================================================================
10028 * FUNCTION   : notifyErrorForPendingRequests
10029 *
10030 * DESCRIPTION: This function sends error for all the pending requests/buffers
10031 *
10032 * PARAMETERS : None
10033 *
10034 * RETURN     : Error codes
10035 *              NO_ERROR on success
10036 *
10037 *==========================================================================*/
10038int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
10039{
10040    int32_t rc = NO_ERROR;
10041    unsigned int frameNum = 0;
10042    camera3_capture_result_t result;
10043    camera3_stream_buffer_t *pStream_Buf = NULL;
10044
10045    memset(&result, 0, sizeof(camera3_capture_result_t));
10046
10047    if (mPendingRequestsList.size() > 0) {
10048        pendingRequestIterator i = mPendingRequestsList.begin();
10049        frameNum = i->frame_number;
10050    } else {
10051        /* There might still be pending buffers even though there are
10052         no pending requests. Setting the frameNum to MAX so that
10053         all the buffers with smaller frame numbers are returned */
10054        frameNum = UINT_MAX;
10055    }
10056
10057    LOGH("Oldest frame num on mPendingRequestsList = %u",
10058       frameNum);
10059
10060    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
10061            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
10062
10063        if (req->frame_number < frameNum) {
10064            // Send Error notify to frameworks for each buffer for which
10065            // metadata buffer is already sent
10066            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
10067                req->frame_number, req->mPendingBufferList.size());
10068
10069            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10070            if (NULL == pStream_Buf) {
10071                LOGE("No memory for pending buffers array");
10072                return NO_MEMORY;
10073            }
10074            memset(pStream_Buf, 0,
10075                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10076            result.result = NULL;
10077            result.frame_number = req->frame_number;
10078            result.num_output_buffers = req->mPendingBufferList.size();
10079            result.output_buffers = pStream_Buf;
10080
10081            size_t index = 0;
10082            for (auto info = req->mPendingBufferList.begin();
10083                info != req->mPendingBufferList.end(); ) {
10084
10085                camera3_notify_msg_t notify_msg;
10086                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10087                notify_msg.type = CAMERA3_MSG_ERROR;
10088                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
10089                notify_msg.message.error.error_stream = info->stream;
10090                notify_msg.message.error.frame_number = req->frame_number;
10091                pStream_Buf[index].acquire_fence = -1;
10092                pStream_Buf[index].release_fence = -1;
10093                pStream_Buf[index].buffer = info->buffer;
10094                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10095                pStream_Buf[index].stream = info->stream;
10096                mCallbackOps->notify(mCallbackOps, &notify_msg);
10097                index++;
10098                // Remove buffer from list
10099                info = req->mPendingBufferList.erase(info);
10100            }
10101
10102            // Remove this request from Map
10103            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10104                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10105            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10106
10107            mCallbackOps->process_capture_result(mCallbackOps, &result);
10108
10109            delete [] pStream_Buf;
10110        } else {
10111
10112            // Go through the pending requests info and send error request to framework
10113            LOGE("Sending ERROR REQUEST for all pending requests");
10114            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
10115
10116            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);
10117
10118            // Send error notify to frameworks
10119            camera3_notify_msg_t notify_msg;
10120            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10121            notify_msg.type = CAMERA3_MSG_ERROR;
10122            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
10123            notify_msg.message.error.error_stream = NULL;
10124            notify_msg.message.error.frame_number = req->frame_number;
10125            mCallbackOps->notify(mCallbackOps, &notify_msg);
10126
10127            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10128            if (NULL == pStream_Buf) {
10129                LOGE("No memory for pending buffers array");
10130                return NO_MEMORY;
10131            }
10132            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10133
10134            result.result = NULL;
10135            result.frame_number = req->frame_number;
10136            result.input_buffer = i->input_buffer;
10137            result.num_output_buffers = req->mPendingBufferList.size();
10138            result.output_buffers = pStream_Buf;
10139
10140            size_t index = 0;
10141            for (auto info = req->mPendingBufferList.begin();
10142                info != req->mPendingBufferList.end(); ) {
10143                pStream_Buf[index].acquire_fence = -1;
10144                pStream_Buf[index].release_fence = -1;
10145                pStream_Buf[index].buffer = info->buffer;
10146                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10147                pStream_Buf[index].stream = info->stream;
10148                index++;
10149                // Remove buffer from list
10150                info = req->mPendingBufferList.erase(info);
10151            }
10152
10153            // Remove this request from Map
10154            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10155                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10156            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10157
10158            mCallbackOps->process_capture_result(mCallbackOps, &result);
10159            delete [] pStream_Buf;
10160            i = erasePendingRequest(i);
10161        }
10162    }
10163
10164    /* Reset pending frame Drop list and requests list */
10165    mPendingFrameDropList.clear();
10166
10167    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
10168        req.mPendingBufferList.clear();
10169    }
10170    mPendingBuffersMap.mPendingBuffersInRequest.clear();
10171    mPendingReprocessResultList.clear();
10172    LOGH("Cleared all the pending buffers ");
10173
10174    return rc;
10175}
10176
10177bool QCamera3HardwareInterface::isOnEncoder(
10178        const cam_dimension_t max_viewfinder_size,
10179        uint32_t width, uint32_t height)
10180{
10181    return (width > (uint32_t)max_viewfinder_size.width ||
10182            height > (uint32_t)max_viewfinder_size.height);
10183}
10184
10185/*===========================================================================
10186 * FUNCTION   : setBundleInfo
10187 *
 * DESCRIPTION: Set bundle info for all streams that are bundled.
10189 *
10190 * PARAMETERS : None
10191 *
10192 * RETURN     : NO_ERROR on success
10193 *              Error codes on failure
10194 *==========================================================================*/
10195int32_t QCamera3HardwareInterface::setBundleInfo()
10196{
10197    int32_t rc = NO_ERROR;
10198
10199    if (mChannelHandle) {
10200        cam_bundle_config_t bundleInfo;
10201        memset(&bundleInfo, 0, sizeof(bundleInfo));
10202        rc = mCameraHandle->ops->get_bundle_info(
10203                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10204        if (rc != NO_ERROR) {
10205            LOGE("get_bundle_info failed");
10206            return rc;
10207        }
10208        if (mAnalysisChannel) {
10209            mAnalysisChannel->setBundleInfo(bundleInfo);
10210        }
10211        if (mSupportChannel) {
10212            mSupportChannel->setBundleInfo(bundleInfo);
10213        }
10214        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10215                it != mStreamInfo.end(); it++) {
10216            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10217            channel->setBundleInfo(bundleInfo);
10218        }
10219        if (mRawDumpChannel) {
10220            mRawDumpChannel->setBundleInfo(bundleInfo);
10221        }
10222    }
10223
10224    return rc;
10225}
10226
10227/*===========================================================================
10228 * FUNCTION   : get_num_overall_buffers
10229 *
10230 * DESCRIPTION: Estimate number of pending buffers across all requests.
10231 *
10232 * PARAMETERS : None
10233 *
10234 * RETURN     : Number of overall pending buffers
10235 *
10236 *==========================================================================*/
10237uint32_t PendingBuffersMap::get_num_overall_buffers()
10238{
10239    uint32_t sum_buffers = 0;
10240    for (auto &req : mPendingBuffersInRequest) {
10241        sum_buffers += req.mPendingBufferList.size();
10242    }
10243    return sum_buffers;
10244}
10245
10246/*===========================================================================
10247 * FUNCTION   : removeBuf
10248 *
10249 * DESCRIPTION: Remove a matching buffer from tracker.
10250 *
10251 * PARAMETERS : @buffer: image buffer for the callback
10252 *
10253 * RETURN     : None
10254 *
10255 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    // Find the first tracked entry whose handle matches @buffer and remove
    // it from its request's pending list. If that leaves the request with
    // no pending buffers, the request entry itself is removed from the map.
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // erase() returns the next iterator, but 'k' is not used
                // again: the inner loop is exited immediately below.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            // Break out of the outer loop as well so 'req' is never
            // incremented after the potential erase() above, which would
            // otherwise skip an element or walk past end().
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
10282
10283}; //end namespace qcamera
10284