// QCamera3HWI.cpp revision 685f4ec43a23654d1409c950139b20b07919bccc
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
// Convenience accessor: Nth buffer pointer inside a heap memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture request.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum sample values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for which EIS is considered supported.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream count limits.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Values per metering region: (xmin, ymin, xmax, ymax, weight), per the
// Android camera metadata region format.
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Number of entries in a statically sized map table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features the HAL3 pipeline may request.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Wait indefinitely (presumably passed to sync/fence waits — confirm at call sites).
#define TIMEOUT_NEVER -1
107
// Per-camera capability structures, indexed by camera id (filled elsewhere).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata blobs, one entry per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL log verbosity; see getLogLevel(), called from the constructor.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Maps CDS setprop string values to backend cam_cds_mode_type values.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Android effect mode <-> backend effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android AWB mode <-> backend white balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
147
// Android scene mode <-> backend scene mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO is expressed as the backend's anti-shake mode.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android AF mode <-> backend focus mode translation table.
// Note: ANDROID_CONTROL_AF_MODE_OFF appears twice, presumably so that both
// backend OFF and FIXED translate to AF_MODE_OFF on the reverse lookup
// (see the first-match traversal note above REFERENCE_ILLUMINANT_MAP).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android color aberration correction mode <-> backend CAC mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding mode <-> backend antibanding mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
200
// Android AE mode -> flash mode the backend should use for that AE mode.
// Both AE_MODE_OFF and plain AE_MODE_ON (no flash control) map to flash OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android flash mode <-> backend flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face detect mode <-> backend face detect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus distance calibration <-> backend calibration quality table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens state <-> backend AF lens state translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
244
// JPEG thumbnail sizes advertised to the framework as (width, height) pairs;
// (0, 0) means "no thumbnail generated".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             // NOTE(review): 256x154 is an
                                             // unusual aspect ratio — confirm
                                             // it is intended (vs. 256x144).
                                             256, 154,
                                             240, 240,
                                             320, 240};

// Android sensor test pattern mode <-> backend test pattern table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are mapped to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Supported HFR frame rates and their backend HFR mode equivalents.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
302
/* Framework-facing camera3 device operation table. register_stream_buffers
 * and get_metadata_vendor_tag_ops are left NULL (deprecated in the HAL3 API;
 * vendor tags are exposed through the module interface instead). */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// NOTE(review): array size is implicitly 3 — confirm this matches
// MM_CAMERA_MAX_NUM_SENSORS, which sizes the other per-camera tables.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
317
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes the
 *              framework-facing camera3_device struct, default state, sync
 *              primitives, debug properties and GPU stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework camera-module callback table (stored, not copied)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Wire up the framework-visible camera3_device struct.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview/video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU's pixel alignment once and cache it; the library is
    // closed immediately afterwards (the function pointer is not kept live).
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;  // fallback when the lib is absent
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
445
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Stops every channel
 *              before deleting any of them, sends a final "unconfigure" to
 *              the backend when a configuration was active, closes the
 *              camera, and releases all pending request bookkeeping and
 *              sync primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    // Metadata channel is stopped after all stream channels.
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Every channel is stopped at this point; now delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is only cleared, not deleted — presumably it is owned
    // through mStreamInfo and already freed above. TODO(review): confirm.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop bookkeeping for any requests/buffers still in flight.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
577
578/*===========================================================================
579 * FUNCTION   : erasePendingRequest
580 *
581 * DESCRIPTION: function to erase a desired pending request after freeing any
582 *              allocated memory
583 *
584 * PARAMETERS :
585 *   @i       : iterator pointing to pending request to be erased
586 *
587 * RETURN     : iterator pointing to the next request
588 *==========================================================================*/
589QCamera3HardwareInterface::pendingRequestIterator
590        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
591{
592    if (i->input_buffer != NULL) {
593        free(i->input_buffer);
594        i->input_buffer = NULL;
595    }
596    if (i->settings != NULL)
597        free_camera_metadata((camera_metadata_t*)i->settings);
598    return mPendingRequestsList.erase(i);
599}
600
601/*===========================================================================
602 * FUNCTION   : camEvtHandle
603 *
604 * DESCRIPTION: Function registered to mm-camera-interface to handle events
605 *
606 * PARAMETERS :
607 *   @camera_handle : interface layer camera handle
608 *   @evt           : ptr to event
609 *   @user_data     : user data ptr
610 *
611 * RETURN     : none
612 *==========================================================================*/
613void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
614                                          mm_camera_event_t *evt,
615                                          void *user_data)
616{
617    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
618    if (obj && evt) {
619        switch(evt->server_event_type) {
620            case CAM_EVENT_TYPE_DAEMON_DIED:
621                pthread_mutex_lock(&obj->mMutex);
622                obj->mState = ERROR;
623                pthread_mutex_unlock(&obj->mMutex);
624                LOGE("Fatal, camera daemon died");
625                break;
626
627            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
628                LOGD("HAL got request pull from Daemon");
629                pthread_mutex_lock(&obj->mMutex);
630                obj->mWokenUpByDaemon = true;
631                obj->unblockRequestIfNecessary();
632                pthread_mutex_unlock(&obj->mMutex);
633                break;
634
635            default:
636                LOGW("Warning: Unhandled event %d",
637                        evt->server_event_type);
638                break;
639        }
640    } else {
641        LOGE("NULL user_data/evt");
642    }
643}
644
645/*===========================================================================
646 * FUNCTION   : openCamera
647 *
648 * DESCRIPTION: open camera
649 *
650 * PARAMETERS :
651 *   @hw_device  : double ptr for camera device struct
652 *
653 * RETURN     : int32_t type of status
654 *              NO_ERROR  -- success
655 *              none-zero failure code
656 *==========================================================================*/
657int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
658{
659    int rc = 0;
660    if (mState != CLOSED) {
661        *hw_device = NULL;
662        return PERMISSION_DENIED;
663    }
664
665    m_perfLock.lock_acq();
666    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
667             mCameraId);
668
669    rc = openCamera();
670    if (rc == 0) {
671        *hw_device = &mCameraDevice.common;
672    } else
673        *hw_device = NULL;
674
675    m_perfLock.lock_rel();
676    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
677             mCameraId, rc);
678
679    if (rc == NO_ERROR) {
680        mState = OPENED;
681    }
682    return rc;
683}
684
/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera: reserves the flash unit, opens the backend
 *              session, registers the event callback, allocates exif debug
 *              params, notifies the display HAL, and sets up the dual-camera
 *              sync buffer.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CALL();
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Take exclusive ownership of the flash unit away from the torch HAL.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Only notify on the transition from zero to one active session.
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            // NOTE(review): m_pRelCamSyncHeap is not freed on this path —
            // presumably cleaned up in closeCamera()/destructor. TODO confirm.
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
                m_pRelCamSyncHeap->getFd(0),
                sizeof(cam_sync_related_sensors_event_info_t),
                m_pRelCamSyncHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            // NOTE(review): rc is set to FAILED_TRANSACTION but NO_MEMORY is
            // returned — inconsistent; confirm which error code is intended.
            return NO_MEMORY;
        }
        m_pRelCamSyncBuf =
                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}
800
801/*===========================================================================
802 * FUNCTION   : closeCamera
803 *
804 * DESCRIPTION: close camera
805 *
806 * PARAMETERS : none
807 *
808 * RETURN     : int32_t type of status
809 *              NO_ERROR  -- success
 *              non-zero failure code
811 *==========================================================================*/
812int QCamera3HardwareInterface::closeCamera()
813{
814    KPI_ATRACE_CALL();
815    int rc = NO_ERROR;
816    char value[PROPERTY_VALUE_MAX];
817
818    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
819             mCameraId);
820    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
821    mCameraHandle = NULL;
822
823    //reset session id to some invalid id
824    pthread_mutex_lock(&gCamLock);
825    sessionId[mCameraId] = 0xDEADBEEF;
826    pthread_mutex_unlock(&gCamLock);
827
828    //Notify display HAL that there is no active camera session
829    //but avoid calling the same during bootup. Refer to openCamera
830    //for more details.
831    property_get("service.bootanim.exit", value, "0");
832    if (atoi(value) == 1) {
833        pthread_mutex_lock(&gCamLock);
834        if (--gNumCameraSessions == 0) {
835            setCameraLaunchStatus(false);
836        }
837        pthread_mutex_unlock(&gCamLock);
838    }
839
840    if (NULL != m_pRelCamSyncHeap) {
841        m_pRelCamSyncHeap->deallocate();
842        delete m_pRelCamSyncHeap;
843        m_pRelCamSyncHeap = NULL;
844        m_pRelCamSyncBuf = NULL;
845    }
846
847    if (mExifParams.debug_params) {
848        free(mExifParams.debug_params);
849        mExifParams.debug_params = NULL;
850    }
851    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
852        LOGW("Failed to release flash for camera id: %d",
853                mCameraId);
854    }
855    mState = CLOSED;
856    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
857         mCameraId, rc);
858    return rc;
859}
860
861/*===========================================================================
862 * FUNCTION   : initialize
863 *
864 * DESCRIPTION: Initialize frameworks callback functions
865 *
866 * PARAMETERS :
867 *   @callback_ops : callback function to frameworks
868 *
869 * RETURN     :
870 *
871 *==========================================================================*/
872int QCamera3HardwareInterface::initialize(
873        const struct camera3_callback_ops *callback_ops)
874{
875    ATRACE_CALL();
876    int rc;
877
878    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
879    pthread_mutex_lock(&mMutex);
880
881    // Validate current state
882    switch (mState) {
883        case OPENED:
884            /* valid state */
885            break;
886        default:
887            LOGE("Invalid state %d", mState);
888            rc = -ENODEV;
889            goto err1;
890    }
891
892    rc = initParameters();
893    if (rc < 0) {
894        LOGE("initParamters failed %d", rc);
895        goto err1;
896    }
897    mCallbackOps = callback_ops;
898
899    mChannelHandle = mCameraHandle->ops->add_channel(
900            mCameraHandle->camera_handle, NULL, NULL, this);
901    if (mChannelHandle == 0) {
902        LOGE("add_channel failed");
903        rc = -ENOMEM;
904        pthread_mutex_unlock(&mMutex);
905        return rc;
906    }
907
908    pthread_mutex_unlock(&mMutex);
909    mCameraInitialized = true;
910    mState = INITIALIZED;
911    LOGI("X");
912    return 0;
913
914err1:
915    pthread_mutex_unlock(&mMutex);
916    return rc;
917}
918
919/*===========================================================================
920 * FUNCTION   : validateStreamDimensions
921 *
922 * DESCRIPTION: Check if the configuration requested are those advertised
923 *
924 * PARAMETERS :
925 *   @stream_list : streams to be configured
926 *
927 * RETURN     :
928 *
929 *==========================================================================*/
930int QCamera3HardwareInterface::validateStreamDimensions(
931        camera3_stream_configuration_t *streamList)
932{
933    int rc = NO_ERROR;
934    size_t count = 0;
935
936    camera3_stream_t *inputStream = NULL;
937    /*
938    * Loop through all streams to find input stream if it exists*
939    */
940    for (size_t i = 0; i< streamList->num_streams; i++) {
941        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
942            if (inputStream != NULL) {
943                LOGE("Error, Multiple input streams requested");
944                return -EINVAL;
945            }
946            inputStream = streamList->streams[i];
947        }
948    }
949    /*
950    * Loop through all streams requested in configuration
951    * Check if unsupported sizes have been requested on any of them
952    */
953    for (size_t j = 0; j < streamList->num_streams; j++) {
954        bool sizeFound = false;
955        camera3_stream_t *newStream = streamList->streams[j];
956
957        uint32_t rotatedHeight = newStream->height;
958        uint32_t rotatedWidth = newStream->width;
959        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
960                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
961            rotatedHeight = newStream->width;
962            rotatedWidth = newStream->height;
963        }
964
965        /*
966        * Sizes are different for each type of stream format check against
967        * appropriate table.
968        */
969        switch (newStream->format) {
970        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
971        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
972        case HAL_PIXEL_FORMAT_RAW10:
973            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
974            for (size_t i = 0; i < count; i++) {
975                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
976                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
977                    sizeFound = true;
978                    break;
979                }
980            }
981            break;
982        case HAL_PIXEL_FORMAT_BLOB:
983            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
984            /* Verify set size against generated sizes table */
985            for (size_t i = 0; i < count; i++) {
986                if (((int32_t)rotatedWidth ==
987                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
988                        ((int32_t)rotatedHeight ==
989                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
990                    sizeFound = true;
991                    break;
992                }
993            }
994            break;
995        case HAL_PIXEL_FORMAT_YCbCr_420_888:
996        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
997        default:
998            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
999                    || newStream->stream_type == CAMERA3_STREAM_INPUT
1000                    || IS_USAGE_ZSL(newStream->usage)) {
1001                if (((int32_t)rotatedWidth ==
1002                                gCamCapability[mCameraId]->active_array_size.width) &&
1003                                ((int32_t)rotatedHeight ==
1004                                gCamCapability[mCameraId]->active_array_size.height)) {
1005                    sizeFound = true;
1006                    break;
1007                }
1008                /* We could potentially break here to enforce ZSL stream
1009                 * set from frameworks always is full active array size
1010                 * but it is not clear from the spc if framework will always
1011                 * follow that, also we have logic to override to full array
1012                 * size, so keeping the logic lenient at the moment
1013                 */
1014            }
1015            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1016                    MAX_SIZES_CNT);
1017            for (size_t i = 0; i < count; i++) {
1018                if (((int32_t)rotatedWidth ==
1019                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1020                            ((int32_t)rotatedHeight ==
1021                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1022                    sizeFound = true;
1023                    break;
1024                }
1025            }
1026            break;
1027        } /* End of switch(newStream->format) */
1028
1029        /* We error out even if a single stream has unsupported size set */
1030        if (!sizeFound) {
1031            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1032                    rotatedWidth, rotatedHeight, newStream->format,
1033                    gCamCapability[mCameraId]->active_array_size.width,
1034                    gCamCapability[mCameraId]->active_array_size.height);
1035            rc = -EINVAL;
1036            break;
1037        }
1038    } /* End of for each stream */
1039    return rc;
1040}
1041
1042/*==============================================================================
1043 * FUNCTION   : isSupportChannelNeeded
1044 *
1045 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1046 *
1047 * PARAMETERS :
1048 *   @stream_list : streams to be configured
1049 *   @stream_config_info : the config info for streams to be configured
1050 *
 * RETURN     : Boolean true/false decision
1052 *
1053 *==========================================================================*/
1054bool QCamera3HardwareInterface::isSupportChannelNeeded(
1055        camera3_stream_configuration_t *streamList,
1056        cam_stream_size_info_t stream_config_info)
1057{
1058    uint32_t i;
1059    bool pprocRequested = false;
1060    /* Check for conditions where PProc pipeline does not have any streams*/
1061    for (i = 0; i < stream_config_info.num_streams; i++) {
1062        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1063                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1064            pprocRequested = true;
1065            break;
1066        }
1067    }
1068
1069    if (pprocRequested == false )
1070        return true;
1071
1072    /* Dummy stream needed if only raw or jpeg streams present */
1073    for (i = 0; i < streamList->num_streams; i++) {
1074        switch(streamList->streams[i]->format) {
1075            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1076            case HAL_PIXEL_FORMAT_RAW10:
1077            case HAL_PIXEL_FORMAT_RAW16:
1078            case HAL_PIXEL_FORMAT_BLOB:
1079                break;
1080            default:
1081                return false;
1082        }
1083    }
1084    return true;
1085}
1086
1087/*==============================================================================
1088 * FUNCTION   : getSensorOutputSize
1089 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1091 *
1092 * PARAMETERS :
1093 *   @sensor_dim : sensor output dimension (output)
1094 *
1095 * RETURN     : int32_t type of status
1096 *              NO_ERROR  -- success
 *              non-zero failure code
1098 *
1099 *==========================================================================*/
1100int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1101{
1102    int32_t rc = NO_ERROR;
1103
1104    cam_dimension_t max_dim = {0, 0};
1105    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1106        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1107            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1108        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1109            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1110    }
1111
1112    clear_metadata_buffer(mParameters);
1113
1114    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1115            max_dim);
1116    if (rc != NO_ERROR) {
1117        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1118        return rc;
1119    }
1120
1121    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1122    if (rc != NO_ERROR) {
1123        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1124        return rc;
1125    }
1126
1127    clear_metadata_buffer(mParameters);
1128    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1129
1130    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1131            mParameters);
1132    if (rc != NO_ERROR) {
1133        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1134        return rc;
1135    }
1136
1137    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1138    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1139
1140    return rc;
1141}
1142
1143/*==============================================================================
1144 * FUNCTION   : enablePowerHint
1145 *
1146 * DESCRIPTION: enable single powerhint for preview and different video modes.
1147 *
1148 * PARAMETERS :
1149 *
1150 * RETURN     : NULL
1151 *
1152 *==========================================================================*/
1153void QCamera3HardwareInterface::enablePowerHint()
1154{
1155    if (!mPowerHintEnabled) {
1156        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1157        mPowerHintEnabled = true;
1158    }
1159}
1160
1161/*==============================================================================
1162 * FUNCTION   : disablePowerHint
1163 *
1164 * DESCRIPTION: disable current powerhint.
1165 *
1166 * PARAMETERS :
1167 *
1168 * RETURN     : NULL
1169 *
1170 *==========================================================================*/
1171void QCamera3HardwareInterface::disablePowerHint()
1172{
1173    if (mPowerHintEnabled) {
1174        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1175        mPowerHintEnabled = false;
1176    }
1177}
1178
1179/*==============================================================================
1180 * FUNCTION   : addToPPFeatureMask
1181 *
1182 * DESCRIPTION: add additional features to pp feature mask based on
1183 *              stream type and usecase
1184 *
1185 * PARAMETERS :
1186 *   @stream_format : stream type for feature mask
1187 *   @stream_idx : stream idx within postprocess_mask list to change
1188 *
1189 * RETURN     : NULL
1190 *
1191 *==========================================================================*/
1192void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1193        uint32_t stream_idx)
1194{
1195    char feature_mask_value[PROPERTY_VALUE_MAX];
1196    cam_feature_mask_t feature_mask;
1197    int args_converted;
1198    int property_len;
1199
1200    /* Get feature mask from property */
1201    property_len = property_get("persist.camera.hal3.feature",
1202            feature_mask_value, "0");
1203    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1204            (feature_mask_value[1] == 'x')) {
1205        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1206    } else {
1207        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1208    }
1209    if (1 != args_converted) {
1210        feature_mask = 0;
1211        LOGE("Wrong feature mask %s", feature_mask_value);
1212        return;
1213    }
1214
1215    switch (stream_format) {
1216    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1217        /* Add LLVD to pp feature mask only if video hint is enabled */
1218        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1219            mStreamConfigInfo.postprocess_mask[stream_idx]
1220                    |= CAM_QTI_FEATURE_SW_TNR;
1221            LOGH("Added SW TNR to pp feature mask");
1222        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1223            mStreamConfigInfo.postprocess_mask[stream_idx]
1224                    |= CAM_QCOM_FEATURE_LLVD;
1225            LOGH("Added LLVD SeeMore to pp feature mask");
1226        }
1227        break;
1228    }
1229    default:
1230        break;
1231    }
1232    LOGD("PP feature mask %llx",
1233            mStreamConfigInfo.postprocess_mask[stream_idx]);
1234}
1235
1236/*==============================================================================
1237 * FUNCTION   : updateFpsInPreviewBuffer
1238 *
1239 * DESCRIPTION: update FPS information in preview buffer.
1240 *
1241 * PARAMETERS :
1242 *   @metadata    : pointer to metadata buffer
1243 *   @frame_number: frame_number to look for in pending buffer list
1244 *
1245 * RETURN     : None
1246 *
1247 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            // Each pending buffer resolves its owning channel via the
            // opaque stream->priv pointer set at channel creation.
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            // Only buffers belonging to the requested frame number AND
            // to a preview-type stream are updated.
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                    CAM_INTF_PARM_FPS_RANGE, metadata) {
                    // Use the max fps of the reported range as the rate;
                    // stored into the gralloc private handle as
                    // UPDATE_REFRESH_RATE metadata (presumably consumed
                    // by the display pipeline -- confirm with setMetaData).
                    int32_t cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                        (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
1274
1275/*===========================================================================
1276 * FUNCTION   : configureStreams
1277 *
1278 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1279 *              and output streams.
1280 *
1281 * PARAMETERS :
1282 *   @stream_list : streams to be configured
1283 *
1284 * RETURN     :
1285 *
1286 *==========================================================================*/
1287int QCamera3HardwareInterface::configureStreams(
1288        camera3_stream_configuration_t *streamList)
1289{
1290    ATRACE_CALL();
1291    int rc = 0;
1292
1293    // Acquire perfLock before configure streams
1294    m_perfLock.lock_acq();
1295    rc = configureStreamsPerfLocked(streamList);
1296    m_perfLock.lock_rel();
1297
1298    return rc;
1299}
1300
1301/*===========================================================================
1302 * FUNCTION   : configureStreamsPerfLocked
1303 *
1304 * DESCRIPTION: configureStreams while perfLock is held.
1305 *
1306 * PARAMETERS :
1307 *   @stream_list : streams to be configured
1308 *
1309 * RETURN     : int32_t type of status
1310 *              NO_ERROR  -- success
 *              non-zero failure code
1312 *==========================================================================*/
1313int QCamera3HardwareInterface::configureStreamsPerfLocked(
1314        camera3_stream_configuration_t *streamList)
1315{
1316    ATRACE_CALL();
1317    int rc = 0;
1318
1319    // Sanity check stream_list
1320    if (streamList == NULL) {
1321        LOGE("NULL stream configuration");
1322        return BAD_VALUE;
1323    }
1324    if (streamList->streams == NULL) {
1325        LOGE("NULL stream list");
1326        return BAD_VALUE;
1327    }
1328
1329    if (streamList->num_streams < 1) {
1330        LOGE("Bad number of streams requested: %d",
1331                streamList->num_streams);
1332        return BAD_VALUE;
1333    }
1334
1335    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1336        LOGE("Maximum number of streams %d exceeded: %d",
1337                MAX_NUM_STREAMS, streamList->num_streams);
1338        return BAD_VALUE;
1339    }
1340
1341    mOpMode = streamList->operation_mode;
1342    LOGD("mOpMode: %d", mOpMode);
1343
1344    /* first invalidate all the steams in the mStreamList
1345     * if they appear again, they will be validated */
1346    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1347            it != mStreamInfo.end(); it++) {
1348        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1349        if (channel) {
1350          channel->stop();
1351        }
1352        (*it)->status = INVALID;
1353    }
1354
1355    if (mRawDumpChannel) {
1356        mRawDumpChannel->stop();
1357        delete mRawDumpChannel;
1358        mRawDumpChannel = NULL;
1359    }
1360
1361    if (mSupportChannel)
1362        mSupportChannel->stop();
1363
1364    if (mAnalysisChannel) {
1365        mAnalysisChannel->stop();
1366    }
1367    if (mMetadataChannel) {
1368        /* If content of mStreamInfo is not 0, there is metadata stream */
1369        mMetadataChannel->stop();
1370    }
1371    if (mChannelHandle) {
1372        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1373                mChannelHandle);
1374        LOGD("stopping channel %d", mChannelHandle);
1375    }
1376
1377    pthread_mutex_lock(&mMutex);
1378
1379    // Check state
1380    switch (mState) {
1381        case INITIALIZED:
1382        case CONFIGURED:
1383        case STARTED:
1384            /* valid state */
1385            break;
1386        default:
1387            LOGE("Invalid state %d", mState);
1388            pthread_mutex_unlock(&mMutex);
1389            return -ENODEV;
1390    }
1391
1392    /* Check whether we have video stream */
1393    m_bIs4KVideo = false;
1394    m_bIsVideo = false;
1395    m_bEisSupportedSize = false;
1396    m_bTnrEnabled = false;
1397    bool isZsl = false;
1398    uint32_t videoWidth = 0U;
1399    uint32_t videoHeight = 0U;
1400    size_t rawStreamCnt = 0;
1401    size_t stallStreamCnt = 0;
1402    size_t processedStreamCnt = 0;
1403    // Number of streams on ISP encoder path
1404    size_t numStreamsOnEncoder = 0;
1405    size_t numYuv888OnEncoder = 0;
1406    bool bYuv888OverrideJpeg = false;
1407    cam_dimension_t largeYuv888Size = {0, 0};
1408    cam_dimension_t maxViewfinderSize = {0, 0};
1409    bool bJpegExceeds4K = false;
1410    bool bJpegOnEncoder = false;
1411    bool bUseCommonFeatureMask = false;
1412    cam_feature_mask_t commonFeatureMask = 0;
1413    bool bSmallJpegSize = false;
1414    uint32_t width_ratio;
1415    uint32_t height_ratio;
1416    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1417    camera3_stream_t *inputStream = NULL;
1418    bool isJpeg = false;
1419    cam_dimension_t jpegSize = {0, 0};
1420
1421    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1422
1423    /*EIS configuration*/
1424    bool eisSupported = false;
1425    bool oisSupported = false;
1426    int32_t margin_index = -1;
1427    uint8_t eis_prop_set;
1428    uint32_t maxEisWidth = 0;
1429    uint32_t maxEisHeight = 0;
1430
1431    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1432
1433    size_t count = IS_TYPE_MAX;
1434    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1435    for (size_t i = 0; i < count; i++) {
1436        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1437            eisSupported = true;
1438            margin_index = (int32_t)i;
1439            break;
1440        }
1441    }
1442
1443    count = CAM_OPT_STAB_MAX;
1444    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1445    for (size_t i = 0; i < count; i++) {
1446        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1447            oisSupported = true;
1448            break;
1449        }
1450    }
1451
1452    if (eisSupported) {
1453        maxEisWidth = MAX_EIS_WIDTH;
1454        maxEisHeight = MAX_EIS_HEIGHT;
1455    }
1456
1457    /* EIS setprop control */
1458    char eis_prop[PROPERTY_VALUE_MAX];
1459    memset(eis_prop, 0, sizeof(eis_prop));
1460    property_get("persist.camera.eis.enable", eis_prop, "0");
1461    eis_prop_set = (uint8_t)atoi(eis_prop);
1462
1463    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1464            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1465
1466    /* stream configurations */
1467    for (size_t i = 0; i < streamList->num_streams; i++) {
1468        camera3_stream_t *newStream = streamList->streams[i];
1469        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1470                "height = %d, rotation = %d, usage = 0x%x",
1471                 i, newStream->stream_type, newStream->format,
1472                newStream->width, newStream->height, newStream->rotation,
1473                newStream->usage);
1474        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1475                newStream->stream_type == CAMERA3_STREAM_INPUT){
1476            isZsl = true;
1477        }
1478        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1479            inputStream = newStream;
1480        }
1481
1482        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1483            isJpeg = true;
1484            jpegSize.width = newStream->width;
1485            jpegSize.height = newStream->height;
1486            if (newStream->width > VIDEO_4K_WIDTH ||
1487                    newStream->height > VIDEO_4K_HEIGHT)
1488                bJpegExceeds4K = true;
1489        }
1490
1491        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1492                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1493            m_bIsVideo = true;
1494            videoWidth = newStream->width;
1495            videoHeight = newStream->height;
1496            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1497                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1498                m_bIs4KVideo = true;
1499            }
1500            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1501                                  (newStream->height <= maxEisHeight);
1502        }
1503        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1504                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1505            switch (newStream->format) {
1506            case HAL_PIXEL_FORMAT_BLOB:
1507                stallStreamCnt++;
1508                if (isOnEncoder(maxViewfinderSize, newStream->width,
1509                        newStream->height)) {
1510                    numStreamsOnEncoder++;
1511                    bJpegOnEncoder = true;
1512                }
1513                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1514                        newStream->width);
1515                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1516                        newStream->height);;
1517                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1518                        "FATAL: max_downscale_factor cannot be zero and so assert");
1519                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1520                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1521                    LOGH("Setting small jpeg size flag to true");
1522                    bSmallJpegSize = true;
1523                }
1524                break;
1525            case HAL_PIXEL_FORMAT_RAW10:
1526            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1527            case HAL_PIXEL_FORMAT_RAW16:
1528                rawStreamCnt++;
1529                break;
1530            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1531                processedStreamCnt++;
1532                if (isOnEncoder(maxViewfinderSize, newStream->width,
1533                        newStream->height)) {
1534                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1535                            !IS_USAGE_ZSL(newStream->usage)) {
1536                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1537                    }
1538                    numStreamsOnEncoder++;
1539                }
1540                break;
1541            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1542                processedStreamCnt++;
1543                if (isOnEncoder(maxViewfinderSize, newStream->width,
1544                        newStream->height)) {
1545                    // If Yuv888 size is not greater than 4K, set feature mask
1546                    // to SUPERSET so that it support concurrent request on
1547                    // YUV and JPEG.
1548                    if (newStream->width <= VIDEO_4K_WIDTH &&
1549                            newStream->height <= VIDEO_4K_HEIGHT) {
1550                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1551                    }
1552                    numStreamsOnEncoder++;
1553                    numYuv888OnEncoder++;
1554                    largeYuv888Size.width = newStream->width;
1555                    largeYuv888Size.height = newStream->height;
1556                }
1557                break;
1558            default:
1559                processedStreamCnt++;
1560                if (isOnEncoder(maxViewfinderSize, newStream->width,
1561                        newStream->height)) {
1562                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1563                    numStreamsOnEncoder++;
1564                }
1565                break;
1566            }
1567
1568        }
1569    }
1570
1571    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1572        !m_bIsVideo) {
1573        m_bEisEnable = false;
1574    }
1575
1576    /* Logic to enable/disable TNR based on specific config size/etc.*/
1577    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1578            ((videoWidth == 1920 && videoHeight == 1080) ||
1579            (videoWidth == 1280 && videoHeight == 720)) &&
1580            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1581        m_bTnrEnabled = true;
1582
1583    /* Check if num_streams is sane */
1584    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1585            rawStreamCnt > MAX_RAW_STREAMS ||
1586            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1587        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1588                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1589        pthread_mutex_unlock(&mMutex);
1590        return -EINVAL;
1591    }
1592    /* Check whether we have zsl stream or 4k video case */
1593    if (isZsl && m_bIsVideo) {
1594        LOGE("Currently invalid configuration ZSL&Video!");
1595        pthread_mutex_unlock(&mMutex);
1596        return -EINVAL;
1597    }
1598    /* Check if stream sizes are sane */
1599    if (numStreamsOnEncoder > 2) {
1600        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1601        pthread_mutex_unlock(&mMutex);
1602        return -EINVAL;
1603    } else if (1 < numStreamsOnEncoder){
1604        bUseCommonFeatureMask = true;
1605        LOGH("Multiple streams above max viewfinder size, common mask needed");
1606    }
1607
1608    /* Check if BLOB size is greater than 4k in 4k recording case */
1609    if (m_bIs4KVideo && bJpegExceeds4K) {
1610        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1611        pthread_mutex_unlock(&mMutex);
1612        return -EINVAL;
1613    }
1614
1615    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1616    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1617    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1618    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1619    // configurations:
1620    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1621    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1622    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1623    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1624        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1625                __func__);
1626        pthread_mutex_unlock(&mMutex);
1627        return -EINVAL;
1628    }
1629
1630    // If a JPEG stream is available, a YUV 888 stream is on the encoder path, and
1631    // the YUV stream's size is strictly greater than the JPEG size, mark
1632    // bYuv888OverrideJpeg so that we can take advantage of postproc bypass.
1633    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1634            jpegSize.width, jpegSize.height) &&
1635            largeYuv888Size.width > jpegSize.width &&
1636            largeYuv888Size.height > jpegSize.height) {
1637        bYuv888OverrideJpeg = true;
1638    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1639        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1640    }
1641
1642    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1643            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1644            commonFeatureMask);
1645    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1646            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1647
1648    rc = validateStreamDimensions(streamList);
1649    if (rc == NO_ERROR) {
1650        rc = validateStreamRotations(streamList);
1651    }
1652    if (rc != NO_ERROR) {
1653        LOGE("Invalid stream configuration requested!");
1654        pthread_mutex_unlock(&mMutex);
1655        return rc;
1656    }
1657
1658    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1659    camera3_stream_t *jpegStream = NULL;
1660    for (size_t i = 0; i < streamList->num_streams; i++) {
1661        camera3_stream_t *newStream = streamList->streams[i];
1662        LOGH("newStream type = %d, stream format = %d "
1663                "stream size : %d x %d, stream rotation = %d",
1664                 newStream->stream_type, newStream->format,
1665                newStream->width, newStream->height, newStream->rotation);
1666        //if the stream is in the mStreamList validate it
1667        bool stream_exists = false;
1668        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1669                it != mStreamInfo.end(); it++) {
1670            if ((*it)->stream == newStream) {
1671                QCamera3ProcessingChannel *channel =
1672                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1673                stream_exists = true;
1674                if (channel)
1675                    delete channel;
1676                (*it)->status = VALID;
1677                (*it)->stream->priv = NULL;
1678                (*it)->channel = NULL;
1679            }
1680        }
1681        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1682            //new stream
1683            stream_info_t* stream_info;
1684            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1685            if (!stream_info) {
1686               LOGE("Could not allocate stream info");
1687               rc = -ENOMEM;
1688               pthread_mutex_unlock(&mMutex);
1689               return rc;
1690            }
1691            stream_info->stream = newStream;
1692            stream_info->status = VALID;
1693            stream_info->channel = NULL;
1694            mStreamInfo.push_back(stream_info);
1695        }
1696        /* Covers Opaque ZSL and API1 F/W ZSL */
1697        if (IS_USAGE_ZSL(newStream->usage)
1698                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1699            if (zslStream != NULL) {
1700                LOGE("Multiple input/reprocess streams requested!");
1701                pthread_mutex_unlock(&mMutex);
1702                return BAD_VALUE;
1703            }
1704            zslStream = newStream;
1705        }
1706        /* Covers YUV reprocess */
1707        if (inputStream != NULL) {
1708            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1709                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1710                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1711                    && inputStream->width == newStream->width
1712                    && inputStream->height == newStream->height) {
1713                if (zslStream != NULL) {
1714                    /* This scenario indicates multiple YUV streams with the same
1715                     * size as the input stream have been requested. Since the zsl
1716                     * stream handle is solely used for overriding the size of streams
1717                     * which share h/w streams, we just make a guess here as to which
1718                     * of the streams is a ZSL stream; this will be refactored once we
1719                     * have generic logic for streams sharing encoder output.
1720                     */
1721                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1722                }
1723                zslStream = newStream;
1724            }
1725        }
1726        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1727            jpegStream = newStream;
1728        }
1729    }
1730
1731    /* If a zsl stream is set, we know that we have configured at least one input or
1732       bidirectional stream */
1733    if (NULL != zslStream) {
1734        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1735        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1736        mInputStreamInfo.format = zslStream->format;
1737        mInputStreamInfo.usage = zslStream->usage;
1738        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1739                 mInputStreamInfo.dim.width,
1740                mInputStreamInfo.dim.height,
1741                mInputStreamInfo.format, mInputStreamInfo.usage);
1742    }
1743
1744    cleanAndSortStreamInfo();
1745    if (mMetadataChannel) {
1746        delete mMetadataChannel;
1747        mMetadataChannel = NULL;
1748    }
1749    if (mSupportChannel) {
1750        delete mSupportChannel;
1751        mSupportChannel = NULL;
1752    }
1753
1754    if (mAnalysisChannel) {
1755        delete mAnalysisChannel;
1756        mAnalysisChannel = NULL;
1757    }
1758
1759    if (mDummyBatchChannel) {
1760        delete mDummyBatchChannel;
1761        mDummyBatchChannel = NULL;
1762    }
1763
1764    //Create metadata channel and initialize it
1765    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1766    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1767            gCamCapability[mCameraId]->color_arrangement);
1768    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1769                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1770                    &padding_info, metadataFeatureMask, this);
1771    if (mMetadataChannel == NULL) {
1772        LOGE("failed to allocate metadata channel");
1773        rc = -ENOMEM;
1774        pthread_mutex_unlock(&mMutex);
1775        return rc;
1776    }
1777    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1778    if (rc < 0) {
1779        LOGE("metadata channel initialization failed");
1780        delete mMetadataChannel;
1781        mMetadataChannel = NULL;
1782        pthread_mutex_unlock(&mMutex);
1783        return rc;
1784    }
1785
1786    // Create analysis stream all the time, even when h/w support is not available
1787    {
1788        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1789        cam_analysis_info_t analysisInfo;
1790        rc = mCommon.getAnalysisInfo(
1791                FALSE,
1792                TRUE,
1793                analysisFeatureMask,
1794                &analysisInfo);
1795        if (rc != NO_ERROR) {
1796            LOGE("getAnalysisInfo failed, ret = %d", rc);
1797            pthread_mutex_unlock(&mMutex);
1798            return rc;
1799        }
1800
1801        cam_color_filter_arrangement_t analysis_color_arrangement =
1802                (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
1803                CAM_FILTER_ARRANGEMENT_Y :
1804                gCamCapability[mCameraId]->color_arrangement);
1805        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1806                analysis_color_arrangement);
1807
1808        mAnalysisChannel = new QCamera3SupportChannel(
1809                mCameraHandle->camera_handle,
1810                mChannelHandle,
1811                mCameraHandle->ops,
1812                &analysisInfo.analysis_padding_info,
1813                analysisFeatureMask,
1814                CAM_STREAM_TYPE_ANALYSIS,
1815                &analysisInfo.analysis_max_res,
1816                (analysisInfo.analysis_format
1817                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1818                : CAM_FORMAT_YUV_420_NV21),
1819                analysisInfo.hw_analysis_supported,
1820                this,
1821                0); // force buffer count to 0
1822        if (!mAnalysisChannel) {
1823            LOGE("H/W Analysis channel cannot be created");
1824            pthread_mutex_unlock(&mMutex);
1825            return -ENOMEM;
1826        }
1827    }
1828
1829    bool isRawStreamRequested = false;
1830    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1831    /* Allocate channel objects for the requested streams */
1832    for (size_t i = 0; i < streamList->num_streams; i++) {
1833        camera3_stream_t *newStream = streamList->streams[i];
1834        uint32_t stream_usage = newStream->usage;
1835        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1836        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1837        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1838                || IS_USAGE_ZSL(newStream->usage)) &&
1839            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1840            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1841            if (bUseCommonFeatureMask) {
1842                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1843                        commonFeatureMask;
1844            } else {
1845                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1846                        CAM_QCOM_FEATURE_NONE;
1847            }
1848
1849        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1850                LOGH("Input stream configured, reprocess config");
1851        } else {
1852            //for non zsl streams find out the format
1853            switch (newStream->format) {
1854            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1855            {
1856                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1857                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1858                /* add additional features to pp feature mask */
1859                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1860                        mStreamConfigInfo.num_streams);
1861
1862                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1863                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1864                                CAM_STREAM_TYPE_VIDEO;
1865                    if (m_bTnrEnabled && m_bTnrVideo) {
1866                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1867                            CAM_QCOM_FEATURE_CPP_TNR;
1868                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1869                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1870                                ~CAM_QCOM_FEATURE_CDS;
1871                    }
1872                } else {
1873                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1874                            CAM_STREAM_TYPE_PREVIEW;
1875                    if (m_bTnrEnabled && m_bTnrPreview) {
1876                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1877                                CAM_QCOM_FEATURE_CPP_TNR;
1878                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1879                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1880                                ~CAM_QCOM_FEATURE_CDS;
1881                    }
1882                    padding_info.width_padding = mSurfaceStridePadding;
1883                    padding_info.height_padding = CAM_PAD_TO_2;
1884                }
1885                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1886                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1887                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1888                            newStream->height;
1889                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1890                            newStream->width;
1891                }
1892            }
1893            break;
1894            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1895                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1896                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1897                    if (bUseCommonFeatureMask)
1898                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1899                                commonFeatureMask;
1900                    else
1901                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1902                                CAM_QCOM_FEATURE_NONE;
1903                } else {
1904                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1905                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1906                }
1907            break;
1908            case HAL_PIXEL_FORMAT_BLOB:
1909                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1910                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1911                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1912                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1913                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1914                } else {
1915                    if (bUseCommonFeatureMask &&
1916                            isOnEncoder(maxViewfinderSize, newStream->width,
1917                            newStream->height)) {
1918                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1919                    } else {
1920                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1921                    }
1922                }
1923                if (isZsl) {
1924                    if (zslStream) {
1925                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1926                                (int32_t)zslStream->width;
1927                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1928                                (int32_t)zslStream->height;
1929                    } else {
1930                        LOGE("Error, No ZSL stream identified");
1931                        pthread_mutex_unlock(&mMutex);
1932                        return -EINVAL;
1933                    }
1934                } else if (m_bIs4KVideo) {
1935                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1936                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1937                } else if (bYuv888OverrideJpeg) {
1938                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1939                            (int32_t)largeYuv888Size.width;
1940                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1941                            (int32_t)largeYuv888Size.height;
1942                }
1943                break;
1944            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1945            case HAL_PIXEL_FORMAT_RAW16:
1946            case HAL_PIXEL_FORMAT_RAW10:
1947                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1948                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1949                isRawStreamRequested = true;
1950                break;
1951            default:
1952                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1953                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1954                break;
1955            }
1956        }
1957
1958        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1959                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1960                gCamCapability[mCameraId]->color_arrangement);
1961
1962        if (newStream->priv == NULL) {
1963            //New stream, construct channel
1964            switch (newStream->stream_type) {
1965            case CAMERA3_STREAM_INPUT:
1966                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1967                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1968                break;
1969            case CAMERA3_STREAM_BIDIRECTIONAL:
1970                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1971                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1972                break;
1973            case CAMERA3_STREAM_OUTPUT:
1974                /* For video encoding stream, set read/write rarely
1975                 * flag so that they may be set to un-cached */
1976                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1977                    newStream->usage |=
1978                         (GRALLOC_USAGE_SW_READ_RARELY |
1979                         GRALLOC_USAGE_SW_WRITE_RARELY |
1980                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1981                else if (IS_USAGE_ZSL(newStream->usage))
1982                {
1983                    LOGD("ZSL usage flag skipping");
1984                }
1985                else if (newStream == zslStream
1986                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1987                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1988                } else
1989                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1990                break;
1991            default:
1992                LOGE("Invalid stream_type %d", newStream->stream_type);
1993                break;
1994            }
1995
1996            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1997                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1998                QCamera3ProcessingChannel *channel = NULL;
1999                switch (newStream->format) {
2000                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2001                    if ((newStream->usage &
2002                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2003                            (streamList->operation_mode ==
2004                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2005                    ) {
2006                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2007                                mChannelHandle, mCameraHandle->ops, captureResultCb,
2008                                &gCamCapability[mCameraId]->padding_info,
2009                                this,
2010                                newStream,
2011                                (cam_stream_type_t)
2012                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2013                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2014                                mMetadataChannel,
2015                                0); //heap buffers are not required for HFR video channel
2016                        if (channel == NULL) {
2017                            LOGE("allocation of channel failed");
2018                            pthread_mutex_unlock(&mMutex);
2019                            return -ENOMEM;
2020                        }
2021                        //channel->getNumBuffers() will return 0 here so use
2022                        //MAX_INFLIGHT_HFR_REQUESTS instead
2023                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2024                        newStream->priv = channel;
2025                        LOGI("num video buffers in HFR mode: %d",
2026                                 MAX_INFLIGHT_HFR_REQUESTS);
2027                    } else {
2028                        /* Copy stream contents in HFR preview only case to create
2029                         * dummy batch channel so that sensor streaming is in
2030                         * HFR mode */
2031                        if (!m_bIsVideo && (streamList->operation_mode ==
2032                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2033                            mDummyBatchStream = *newStream;
2034                        }
2035                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2036                                mChannelHandle, mCameraHandle->ops, captureResultCb,
2037                                &gCamCapability[mCameraId]->padding_info,
2038                                this,
2039                                newStream,
2040                                (cam_stream_type_t)
2041                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2042                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2043                                mMetadataChannel,
2044                                MAX_INFLIGHT_REQUESTS);
2045                        if (channel == NULL) {
2046                            LOGE("allocation of channel failed");
2047                            pthread_mutex_unlock(&mMutex);
2048                            return -ENOMEM;
2049                        }
2050                        newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
2051                        newStream->priv = channel;
2052                    }
2053                    break;
2054                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2055                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2056                            mChannelHandle,
2057                            mCameraHandle->ops, captureResultCb,
2058                            &padding_info,
2059                            this,
2060                            newStream,
2061                            (cam_stream_type_t)
2062                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2063                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2064                            mMetadataChannel);
2065                    if (channel == NULL) {
2066                        LOGE("allocation of YUV channel failed");
2067                        pthread_mutex_unlock(&mMutex);
2068                        return -ENOMEM;
2069                    }
2070                    newStream->max_buffers = channel->getNumBuffers();
2071                    newStream->priv = channel;
2072                    break;
2073                }
2074                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2075                case HAL_PIXEL_FORMAT_RAW16:
2076                case HAL_PIXEL_FORMAT_RAW10:
2077                    mRawChannel = new QCamera3RawChannel(
2078                            mCameraHandle->camera_handle, mChannelHandle,
2079                            mCameraHandle->ops, captureResultCb,
2080                            &padding_info,
2081                            this, newStream,
2082                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2083                            mMetadataChannel,
2084                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2085                    if (mRawChannel == NULL) {
2086                        LOGE("allocation of raw channel failed");
2087                        pthread_mutex_unlock(&mMutex);
2088                        return -ENOMEM;
2089                    }
2090                    newStream->max_buffers = mRawChannel->getNumBuffers();
2091                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2092                    break;
2093                case HAL_PIXEL_FORMAT_BLOB:
2094                    // Max live snapshot inflight buffer is 1. This is to mitigate
2095                    // frame drop issues for video snapshot. The more buffers being
2096                    // allocated, the more frame drops there are.
2097                    mPictureChannel = new QCamera3PicChannel(
2098                            mCameraHandle->camera_handle, mChannelHandle,
2099                            mCameraHandle->ops, captureResultCb,
2100                            &padding_info, this, newStream,
2101                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2102                            m_bIs4KVideo, isZsl, mMetadataChannel,
2103                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2104                    if (mPictureChannel == NULL) {
2105                        LOGE("allocation of channel failed");
2106                        pthread_mutex_unlock(&mMutex);
2107                        return -ENOMEM;
2108                    }
2109                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2110                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2111                    mPictureChannel->overrideYuvSize(
2112                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2113                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2114                    break;
2115
2116                default:
2117                    LOGE("not a supported format 0x%x", newStream->format);
2118                    break;
2119                }
2120            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2121                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2122            } else {
2123                LOGE("Error, Unknown stream type");
2124                pthread_mutex_unlock(&mMutex);
2125                return -EINVAL;
2126            }
2127
2128            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2129            if (channel != NULL && channel->isUBWCEnabled()) {
2130                cam_format_t fmt = channel->getStreamDefaultFormat(
2131                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2132                        newStream->width, newStream->height);
2133                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2134                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2135                }
2136            }
2137
2138            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2139                    it != mStreamInfo.end(); it++) {
2140                if ((*it)->stream == newStream) {
2141                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2142                    break;
2143                }
2144            }
2145        } else {
2146            // Channel already exists for this stream
2147            // Do nothing for now
2148        }
2149        padding_info = gCamCapability[mCameraId]->padding_info;
2150
2151        /* Do not add entries for input stream in metastream info
2152         * since there is no real stream associated with it
2153         */
2154        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2155            mStreamConfigInfo.num_streams++;
2156    }
2157
2158    //RAW DUMP channel
2159    if (mEnableRawDump && isRawStreamRequested == false){
2160        cam_dimension_t rawDumpSize;
2161        rawDumpSize = getMaxRawSize(mCameraId);
2162        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2163        setPAAFSupport(rawDumpFeatureMask,
2164                CAM_STREAM_TYPE_RAW,
2165                gCamCapability[mCameraId]->color_arrangement);
2166        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2167                                  mChannelHandle,
2168                                  mCameraHandle->ops,
2169                                  rawDumpSize,
2170                                  &padding_info,
2171                                  this, rawDumpFeatureMask);
2172        if (!mRawDumpChannel) {
2173            LOGE("Raw Dump channel cannot be created");
2174            pthread_mutex_unlock(&mMutex);
2175            return -ENOMEM;
2176        }
2177    }
2178
2179
2180    if (mAnalysisChannel) {
2181        cam_analysis_info_t analysisInfo;
2182        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2183        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2184                CAM_STREAM_TYPE_ANALYSIS;
2185        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2186                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2187        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2188                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2189                &analysisInfo);
2190        if (rc != NO_ERROR) {
2191            LOGE("getAnalysisInfo failed, ret = %d", rc);
2192            pthread_mutex_unlock(&mMutex);
2193            return rc;
2194        }
2195        cam_color_filter_arrangement_t analysis_color_arrangement =
2196                (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2197                CAM_FILTER_ARRANGEMENT_Y :
2198                gCamCapability[mCameraId]->color_arrangement);
2199        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2200                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2201                analysis_color_arrangement);
2202
2203        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2204                analysisInfo.analysis_max_res;
2205        mStreamConfigInfo.num_streams++;
2206    }
2207
2208    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2209        cam_analysis_info_t supportInfo;
2210        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2211        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2212        setPAAFSupport(callbackFeatureMask,
2213                CAM_STREAM_TYPE_CALLBACK,
2214                gCamCapability[mCameraId]->color_arrangement);
2215        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2216        if (rc != NO_ERROR) {
2217            LOGE("getAnalysisInfo failed, ret = %d", rc);
2218            pthread_mutex_unlock(&mMutex);
2219            return rc;
2220        }
2221        mSupportChannel = new QCamera3SupportChannel(
2222                mCameraHandle->camera_handle,
2223                mChannelHandle,
2224                mCameraHandle->ops,
2225                &gCamCapability[mCameraId]->padding_info,
2226                callbackFeatureMask,
2227                CAM_STREAM_TYPE_CALLBACK,
2228                &QCamera3SupportChannel::kDim,
2229                CAM_FORMAT_YUV_420_NV21,
2230                supportInfo.hw_analysis_supported,
2231                this, 0);
2232        if (!mSupportChannel) {
2233            LOGE("dummy channel cannot be created");
2234            pthread_mutex_unlock(&mMutex);
2235            return -ENOMEM;
2236        }
2237    }
2238
2239    if (mSupportChannel) {
2240        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2241                QCamera3SupportChannel::kDim;
2242        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2243                CAM_STREAM_TYPE_CALLBACK;
2244        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2245                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2246        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2247                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2248                gCamCapability[mCameraId]->color_arrangement);
2249        mStreamConfigInfo.num_streams++;
2250    }
2251
2252    if (mRawDumpChannel) {
2253        cam_dimension_t rawSize;
2254        rawSize = getMaxRawSize(mCameraId);
2255        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2256                rawSize;
2257        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2258                CAM_STREAM_TYPE_RAW;
2259        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2260                CAM_QCOM_FEATURE_NONE;
2261        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2262                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2263                gCamCapability[mCameraId]->color_arrangement);
2264        mStreamConfigInfo.num_streams++;
2265    }
2266    /* In HFR mode, if video stream is not added, create a dummy channel so that
2267     * ISP can create a batch mode even for preview only case. This channel is
2268     * never 'start'ed (no stream-on), it is only 'initialized'  */
2269    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2270            !m_bIsVideo) {
2271        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2272        setPAAFSupport(dummyFeatureMask,
2273                CAM_STREAM_TYPE_VIDEO,
2274                gCamCapability[mCameraId]->color_arrangement);
2275        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2276                mChannelHandle,
2277                mCameraHandle->ops, captureResultCb,
2278                &gCamCapability[mCameraId]->padding_info,
2279                this,
2280                &mDummyBatchStream,
2281                CAM_STREAM_TYPE_VIDEO,
2282                dummyFeatureMask,
2283                mMetadataChannel);
2284        if (NULL == mDummyBatchChannel) {
2285            LOGE("creation of mDummyBatchChannel failed."
2286                    "Preview will use non-hfr sensor mode ");
2287        }
2288    }
2289    if (mDummyBatchChannel) {
2290        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2291                mDummyBatchStream.width;
2292        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2293                mDummyBatchStream.height;
2294        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2295                CAM_STREAM_TYPE_VIDEO;
2296        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2297                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2298        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2299                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2300                gCamCapability[mCameraId]->color_arrangement);
2301        mStreamConfigInfo.num_streams++;
2302    }
2303
2304    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2305    mStreamConfigInfo.buffer_info.max_buffers =
2306            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2307
2308    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2309    for (pendingRequestIterator i = mPendingRequestsList.begin();
2310            i != mPendingRequestsList.end();) {
2311        i = erasePendingRequest(i);
2312    }
2313    mPendingFrameDropList.clear();
2314    // Initialize/Reset the pending buffers list
2315    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2316        req.mPendingBufferList.clear();
2317    }
2318    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2319
2320    mPendingReprocessResultList.clear();
2321
2322    mCurJpegMeta.clear();
2323    //Get min frame duration for this streams configuration
2324    deriveMinFrameDuration();
2325
2326    // Update state
2327    mState = CONFIGURED;
2328
2329    pthread_mutex_unlock(&mMutex);
2330
2331    return rc;
2332}
2333
2334/*===========================================================================
2335 * FUNCTION   : validateCaptureRequest
2336 *
2337 * DESCRIPTION: validate a capture request from camera service
2338 *
2339 * PARAMETERS :
2340 *   @request : request from framework to process
2341 *
2342 * RETURN     :
2343 *
2344 *==========================================================================*/
2345int QCamera3HardwareInterface::validateCaptureRequest(
2346                    camera3_capture_request_t *request)
2347{
2348    ssize_t idx = 0;
2349    const camera3_stream_buffer_t *b;
2350    CameraMetadata meta;
2351
2352    /* Sanity check the request */
2353    if (request == NULL) {
2354        LOGE("NULL capture request");
2355        return BAD_VALUE;
2356    }
2357
2358    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2359        /*settings cannot be null for the first request*/
2360        return BAD_VALUE;
2361    }
2362
2363    uint32_t frameNumber = request->frame_number;
2364    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2365        LOGE("Request %d: No output buffers provided!",
2366                __FUNCTION__, frameNumber);
2367        return BAD_VALUE;
2368    }
2369    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2370        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2371                 request->num_output_buffers, MAX_NUM_STREAMS);
2372        return BAD_VALUE;
2373    }
2374    if (request->input_buffer != NULL) {
2375        b = request->input_buffer;
2376        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2377            LOGE("Request %d: Buffer %ld: Status not OK!",
2378                     frameNumber, (long)idx);
2379            return BAD_VALUE;
2380        }
2381        if (b->release_fence != -1) {
2382            LOGE("Request %d: Buffer %ld: Has a release fence!",
2383                     frameNumber, (long)idx);
2384            return BAD_VALUE;
2385        }
2386        if (b->buffer == NULL) {
2387            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2388                     frameNumber, (long)idx);
2389            return BAD_VALUE;
2390        }
2391    }
2392
2393    // Validate all buffers
2394    b = request->output_buffers;
2395    do {
2396        QCamera3ProcessingChannel *channel =
2397                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2398        if (channel == NULL) {
2399            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2400                     frameNumber, (long)idx);
2401            return BAD_VALUE;
2402        }
2403        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2404            LOGE("Request %d: Buffer %ld: Status not OK!",
2405                     frameNumber, (long)idx);
2406            return BAD_VALUE;
2407        }
2408        if (b->release_fence != -1) {
2409            LOGE("Request %d: Buffer %ld: Has a release fence!",
2410                     frameNumber, (long)idx);
2411            return BAD_VALUE;
2412        }
2413        if (b->buffer == NULL) {
2414            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2415                     frameNumber, (long)idx);
2416            return BAD_VALUE;
2417        }
2418        if (*(b->buffer) == NULL) {
2419            LOGE("Request %d: Buffer %ld: NULL private handle!",
2420                     frameNumber, (long)idx);
2421            return BAD_VALUE;
2422        }
2423        idx++;
2424        b = request->output_buffers + idx;
2425    } while (idx < (ssize_t)request->num_output_buffers);
2426
2427    return NO_ERROR;
2428}
2429
2430/*===========================================================================
2431 * FUNCTION   : deriveMinFrameDuration
2432 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2434 *              on currently configured streams.
2435 *
2436 * PARAMETERS : NONE
2437 *
2438 * RETURN     : NONE
2439 *
2440 *==========================================================================*/
2441void QCamera3HardwareInterface::deriveMinFrameDuration()
2442{
2443    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2444
2445    maxJpegDim = 0;
2446    maxProcessedDim = 0;
2447    maxRawDim = 0;
2448
2449    // Figure out maximum jpeg, processed, and raw dimensions
2450    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2451        it != mStreamInfo.end(); it++) {
2452
2453        // Input stream doesn't have valid stream_type
2454        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2455            continue;
2456
2457        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2458        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2459            if (dimension > maxJpegDim)
2460                maxJpegDim = dimension;
2461        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2462                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2463                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2464            if (dimension > maxRawDim)
2465                maxRawDim = dimension;
2466        } else {
2467            if (dimension > maxProcessedDim)
2468                maxProcessedDim = dimension;
2469        }
2470    }
2471
2472    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2473            MAX_SIZES_CNT);
2474
2475    //Assume all jpeg dimensions are in processed dimensions.
2476    if (maxJpegDim > maxProcessedDim)
2477        maxProcessedDim = maxJpegDim;
2478    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2479    if (maxProcessedDim > maxRawDim) {
2480        maxRawDim = INT32_MAX;
2481
2482        for (size_t i = 0; i < count; i++) {
2483            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2484                    gCamCapability[mCameraId]->raw_dim[i].height;
2485            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2486                maxRawDim = dimension;
2487        }
2488    }
2489
2490    //Find minimum durations for processed, jpeg, and raw
2491    for (size_t i = 0; i < count; i++) {
2492        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2493                gCamCapability[mCameraId]->raw_dim[i].height) {
2494            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2495            break;
2496        }
2497    }
2498    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2499    for (size_t i = 0; i < count; i++) {
2500        if (maxProcessedDim ==
2501                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2502                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2503            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2504            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2505            break;
2506        }
2507    }
2508}
2509
2510/*===========================================================================
2511 * FUNCTION   : getMinFrameDuration
2512 *
 * DESCRIPTION: get minimum frame duration based on the current minimum frame durations
2514 *              and current request configuration.
2515 *
 * PARAMETERS : @request: request sent by the framework
2517 *
 * RETURN     : min frame duration for a particular request
2519 *
2520 *==========================================================================*/
2521int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2522{
2523    bool hasJpegStream = false;
2524    bool hasRawStream = false;
2525    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2526        const camera3_stream_t *stream = request->output_buffers[i].stream;
2527        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2528            hasJpegStream = true;
2529        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2530                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2531                stream->format == HAL_PIXEL_FORMAT_RAW16)
2532            hasRawStream = true;
2533    }
2534
2535    if (!hasJpegStream)
2536        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2537    else
2538        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2539}
2540
2541/*===========================================================================
2542 * FUNCTION   : handleBuffersDuringFlushLock
2543 *
2544 * DESCRIPTION: Account for buffers returned from back-end during flush
2545 *              This function is executed while mMutex is held by the caller.
2546 *
2547 * PARAMETERS :
2548 *   @buffer: image buffer for the callback
2549 *
2550 * RETURN     :
2551 *==========================================================================*/
2552void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2553{
2554    bool buffer_found = false;
2555    for (List<PendingBuffersInRequest>::iterator req =
2556            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2557            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2558        for (List<PendingBufferInfo>::iterator i =
2559                req->mPendingBufferList.begin();
2560                i != req->mPendingBufferList.end(); i++) {
2561            if (i->buffer == buffer->buffer) {
2562                mPendingBuffersMap.numPendingBufsAtFlush--;
2563                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2564                    buffer->buffer, req->frame_number,
2565                    mPendingBuffersMap.numPendingBufsAtFlush);
2566                buffer_found = true;
2567                break;
2568            }
2569        }
2570        if (buffer_found) {
2571            break;
2572        }
2573    }
2574    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2575        //signal the flush()
2576        LOGD("All buffers returned to HAL. Continue flush");
2577        pthread_cond_signal(&mBuffersCond);
2578    }
2579}
2580
2581
2582/*===========================================================================
2583 * FUNCTION   : handlePendingReprocResults
2584 *
2585 * DESCRIPTION: check and notify on any pending reprocess results
2586 *
2587 * PARAMETERS :
2588 *   @frame_number   : Pending request frame number
2589 *
2590 * RETURN     : int32_t type of status
2591 *              NO_ERROR  -- success
2592 *              none-zero failure code
2593 *==========================================================================*/
2594int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
2595{
2596    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
2597            j != mPendingReprocessResultList.end(); j++) {
2598        if (j->frame_number == frame_number) {
2599            mCallbackOps->notify(mCallbackOps, &j->notify_msg);
2600
2601            LOGD("Delayed reprocess notify %d",
2602                    frame_number);
2603
2604            for (pendingRequestIterator k = mPendingRequestsList.begin();
2605                    k != mPendingRequestsList.end(); k++) {
2606
2607                if (k->frame_number == j->frame_number) {
2608                    LOGD("Found reprocess frame number %d in pending reprocess List "
2609                            "Take it out!!",
2610                            k->frame_number);
2611
2612                    camera3_capture_result result;
2613                    memset(&result, 0, sizeof(camera3_capture_result));
2614                    result.frame_number = frame_number;
2615                    result.num_output_buffers = 1;
2616                    result.output_buffers =  &j->buffer;
2617                    result.input_buffer = k->input_buffer;
2618                    result.result = k->settings;
2619                    result.partial_result = PARTIAL_RESULT_COUNT;
2620                    mCallbackOps->process_capture_result(mCallbackOps, &result);
2621
2622                    erasePendingRequest(k);
2623                    break;
2624                }
2625            }
2626            mPendingReprocessResultList.erase(j);
2627            break;
2628        }
2629    }
2630    return NO_ERROR;
2631}
2632
2633/*===========================================================================
2634 * FUNCTION   : handleBatchMetadata
2635 *
2636 * DESCRIPTION: Handles metadata buffer callback in batch mode
2637 *
2638 * PARAMETERS : @metadata_buf: metadata buffer
2639 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2640 *                 the meta buf in this method
2641 *
2642 * RETURN     :
2643 *
2644 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    // Number of per-frame results to synthesize out of this one batch
    // metadata; defaults to 1 when no valid frame numbers are present.
    size_t loopCount = 1;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // If any of the required entries is missing we still run the loop below
    // (for pipeline-depth bookkeeping), but skip all interpolation.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    // mPendingBatchMap (guarded by mMutex) maps the batch's last frame
    // number to its first, from which the batch length is derived.
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is fully handled after this call; retire its map entry.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGH("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Loop over the longer of the two spans so neither set of frame
        // numbers is left unreported.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    // Patch the shared metadata in place before each
                    // handleMetadataWithLock call.
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: spread the batch evenly at the HFR video
                //frame rate, ending at the reported last-frame capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGH("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // mMutex is taken per iteration (not across the loop) so other
        // threads can interleave between the synthesized results.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    // Done after the loop because every iteration reads metadata_buf.
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2794
2795void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2796        camera3_error_msg_code_t errorCode)
2797{
2798    camera3_notify_msg_t notify_msg;
2799    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2800    notify_msg.type = CAMERA3_MSG_ERROR;
2801    notify_msg.message.error.error_code = errorCode;
2802    notify_msg.message.error.error_stream = NULL;
2803    notify_msg.message.error.frame_number = frameNumber;
2804    mCallbackOps->notify(mCallbackOps, &notify_msg);
2805
2806    return;
2807}
2808/*===========================================================================
2809 * FUNCTION   : handleMetadataWithLock
2810 *
2811 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2812 *
2813 * PARAMETERS : @metadata_buf: metadata buffer
2814 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2815 *                 the meta buf in this method
2816 *              @firstMetadataInBatch: Boolean to indicate whether this is the
2817 *                  first metadata in a batch. Valid only for batch mode
2818 *
2819 * RETURN     :
2820 *
2821 *==========================================================================*/
2822void QCamera3HardwareInterface::handleMetadataWithLock(
2823    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2824    bool firstMetadataInBatch)
2825{
2826    ATRACE_CALL();
2827    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2828        //during flush do not send metadata from this thread
2829        LOGD("not sending metadata during flush or when mState is error");
2830        if (free_and_bufdone_meta_buf) {
2831            mMetadataChannel->bufDone(metadata_buf);
2832            free(metadata_buf);
2833        }
2834        return;
2835    }
2836
2837    //not in flush
2838    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2839    int32_t frame_number_valid, urgent_frame_number_valid;
2840    uint32_t frame_number, urgent_frame_number;
2841    int64_t capture_time;
2842    nsecs_t currentSysTime;
2843
2844    int32_t *p_frame_number_valid =
2845            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2846    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2847    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2848    int32_t *p_urgent_frame_number_valid =
2849            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2850    uint32_t *p_urgent_frame_number =
2851            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2852    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2853            metadata) {
2854        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2855                 *p_frame_number_valid, *p_frame_number);
2856    }
2857
2858    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2859            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2860        LOGE("Invalid metadata");
2861        if (free_and_bufdone_meta_buf) {
2862            mMetadataChannel->bufDone(metadata_buf);
2863            free(metadata_buf);
2864        }
2865        goto done_metadata;
2866    }
2867    frame_number_valid =        *p_frame_number_valid;
2868    frame_number =              *p_frame_number;
2869    capture_time =              *p_capture_time;
2870    urgent_frame_number_valid = *p_urgent_frame_number_valid;
2871    urgent_frame_number =       *p_urgent_frame_number;
2872    currentSysTime =            systemTime(CLOCK_MONOTONIC);
2873
2874    // Detect if buffers from any requests are overdue
2875    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2876        if ( (currentSysTime - req.timestamp) >
2877            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2878            for (auto &missed : req.mPendingBufferList) {
2879                assert(missed.stream->priv);
2880                if (missed.stream->priv) {
2881                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
2882                    assert(ch->mStreams[0]);
2883                    if (ch->mStreams[0]) {
2884                        LOGW("Missing: frame = %d, buffer = %p,"
2885                            "stream type = %d, stream format = %d",
2886                            req.frame_number, missed.buffer,
2887                            ch->mStreams[0]->getMyType(), missed.stream->format);
2888                    }
2889                }
2890            }
2891        }
2892    }
2893    //Partial result on process_capture_result for timestamp
2894    if (urgent_frame_number_valid) {
2895        LOGD("valid urgent frame_number = %u, capture_time = %lld",
2896           urgent_frame_number, capture_time);
2897
2898        //Recieved an urgent Frame Number, handle it
2899        //using partial results
2900        for (pendingRequestIterator i =
2901                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2902            LOGD("Iterator Frame = %d urgent frame = %d",
2903                 i->frame_number, urgent_frame_number);
2904
2905            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2906                (i->partial_result_cnt == 0)) {
2907                LOGE("Error: HAL missed urgent metadata for frame number %d",
2908                         i->frame_number);
2909            }
2910
2911            if (i->frame_number == urgent_frame_number &&
2912                     i->bUrgentReceived == 0) {
2913
2914                camera3_capture_result_t result;
2915                memset(&result, 0, sizeof(camera3_capture_result_t));
2916
2917                i->partial_result_cnt++;
2918                i->bUrgentReceived = 1;
2919                // Extract 3A metadata
2920                result.result =
2921                    translateCbUrgentMetadataToResultMetadata(metadata);
2922                // Populate metadata result
2923                result.frame_number = urgent_frame_number;
2924                result.num_output_buffers = 0;
2925                result.output_buffers = NULL;
2926                result.partial_result = i->partial_result_cnt;
2927
2928                mCallbackOps->process_capture_result(mCallbackOps, &result);
2929                LOGD("urgent frame_number = %u, capture_time = %lld",
2930                      result.frame_number, capture_time);
2931                free_camera_metadata((camera_metadata_t *)result.result);
2932                break;
2933            }
2934        }
2935    }
2936
2937    if (!frame_number_valid) {
2938        LOGD("Not a valid normal frame number, used as SOF only");
2939        if (free_and_bufdone_meta_buf) {
2940            mMetadataChannel->bufDone(metadata_buf);
2941            free(metadata_buf);
2942        }
2943        goto done_metadata;
2944    }
2945    LOGH("valid frame_number = %u, capture_time = %lld",
2946            frame_number, capture_time);
2947
2948    for (pendingRequestIterator i = mPendingRequestsList.begin();
2949            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2950        // Flush out all entries with less or equal frame numbers.
2951
2952        camera3_capture_result_t result;
2953        memset(&result, 0, sizeof(camera3_capture_result_t));
2954
2955        LOGD("frame_number in the list is %u", i->frame_number);
2956        i->partial_result_cnt++;
2957        result.partial_result = i->partial_result_cnt;
2958
2959        // Check whether any stream buffer corresponding to this is dropped or not
2960        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2961        // The API does not expect a blob buffer to be dropped
2962        if (p_cam_frame_drop) {
2963            /* Clear notify_msg structure */
2964            camera3_notify_msg_t notify_msg;
2965            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2966            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2967                    j != i->buffers.end(); j++) {
2968                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2969                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2970                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
2971                    if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
2972                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2973                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2974                                __func__, i->frame_number, streamID, j->stream->format);
2975                        notify_msg.type = CAMERA3_MSG_ERROR;
2976                        notify_msg.message.error.frame_number = i->frame_number;
2977                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2978                        notify_msg.message.error.error_stream = j->stream;
2979                        mCallbackOps->notify(mCallbackOps, &notify_msg);
2980                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2981                                __func__, i->frame_number, streamID, j->stream->format);
2982                        PendingFrameDropInfo PendingFrameDrop;
2983                        PendingFrameDrop.frame_number=i->frame_number;
2984                        PendingFrameDrop.stream_ID = streamID;
2985                        // Add the Frame drop info to mPendingFrameDropList
2986                        mPendingFrameDropList.push_back(PendingFrameDrop);
2987                   }
2988               }
2989            }
2990        }
2991
2992        // Send empty metadata with already filled buffers for dropped metadata
2993        // and send valid metadata with already filled buffers for current metadata
2994        /* we could hit this case when we either
2995         * 1. have a pending reprocess request or
2996         * 2. miss a metadata buffer callback */
2997        if (i->frame_number < frame_number) {
2998            if (i->input_buffer) {
2999                /* this will be handled in handleInputBufferWithLock */
3000                i++;
3001                continue;
3002            } else if (mBatchSize) {
3003
3004                mPendingLiveRequest--;
3005
3006                CameraMetadata dummyMetadata;
3007                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3008                result.result = dummyMetadata.release();
3009
3010                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
3011            } else {
3012                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
3013                if (free_and_bufdone_meta_buf) {
3014                    mMetadataChannel->bufDone(metadata_buf);
3015                    free(metadata_buf);
3016                }
3017                mState = ERROR;
3018                goto done_metadata;
3019            }
3020        } else {
3021            mPendingLiveRequest--;
3022            /* Clear notify_msg structure */
3023            camera3_notify_msg_t notify_msg;
3024            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3025
3026            // Send shutter notify to frameworks
3027            notify_msg.type = CAMERA3_MSG_SHUTTER;
3028            notify_msg.message.shutter.frame_number = i->frame_number;
3029            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3030            mCallbackOps->notify(mCallbackOps, &notify_msg);
3031
3032            i->timestamp = capture_time;
3033
3034            // Find channel requiring metadata, meaning internal offline postprocess
3035            // is needed.
3036            //TODO: for now, we don't support two streams requiring metadata at the same time.
3037            // (because we are not making copies, and metadata buffer is not reference counted.
3038            bool internalPproc = false;
3039            for (pendingBufferIterator iter = i->buffers.begin();
3040                    iter != i->buffers.end(); iter++) {
3041                if (iter->need_metadata) {
3042                    internalPproc = true;
3043                    QCamera3ProcessingChannel *channel =
3044                            (QCamera3ProcessingChannel *)iter->stream->priv;
3045                    channel->queueReprocMetadata(metadata_buf);
3046                    break;
3047                }
3048            }
3049
3050            // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
3051            result.result = translateFromHalMetadata(metadata,
3052                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
3053                    i->capture_intent, i->hybrid_ae_enable,
3054                     /* DevCamDebug metadata translateFromHalMetadata function call*/
3055                    i->DevCamDebug_meta_enable,
3056                    /* DevCamDebug metadata end */
3057                    internalPproc, i->fwkCacMode,
3058                    firstMetadataInBatch);
3059            // atrace_end(ATRACE_TAG_ALWAYS);
3060
3061            saveExifParams(metadata);
3062
3063            if (i->blob_request) {
3064                {
3065                    //Dump tuning metadata if enabled and available
3066                    char prop[PROPERTY_VALUE_MAX];
3067                    memset(prop, 0, sizeof(prop));
3068                    property_get("persist.camera.dumpmetadata", prop, "0");
3069                    int32_t enabled = atoi(prop);
3070                    if (enabled && metadata->is_tuning_params_valid) {
3071                        dumpMetadataToFile(metadata->tuning_params,
3072                               mMetaFrameCount,
3073                               enabled,
3074                               "Snapshot",
3075                               frame_number);
3076                    }
3077                }
3078            }
3079
3080            if (!internalPproc) {
3081                LOGD("couldn't find need_metadata for this metadata");
3082                // Return metadata buffer
3083                if (free_and_bufdone_meta_buf) {
3084                    mMetadataChannel->bufDone(metadata_buf);
3085                    free(metadata_buf);
3086                }
3087            }
3088        }
3089        if (!result.result) {
3090            LOGE("metadata is NULL");
3091        }
3092        result.frame_number = i->frame_number;
3093        result.input_buffer = i->input_buffer;
3094        result.num_output_buffers = 0;
3095        result.output_buffers = NULL;
3096        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3097                    j != i->buffers.end(); j++) {
3098            if (j->buffer) {
3099                result.num_output_buffers++;
3100            }
3101        }
3102
3103        updateFpsInPreviewBuffer(metadata, i->frame_number);
3104
3105        if (result.num_output_buffers > 0) {
3106            camera3_stream_buffer_t *result_buffers =
3107                new camera3_stream_buffer_t[result.num_output_buffers];
3108            if (result_buffers != NULL) {
3109                size_t result_buffers_idx = 0;
3110                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3111                        j != i->buffers.end(); j++) {
3112                    if (j->buffer) {
3113                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3114                                m != mPendingFrameDropList.end(); m++) {
3115                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3116                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3117                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3118                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3119                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3120                                        frame_number, streamID);
3121                                m = mPendingFrameDropList.erase(m);
3122                                break;
3123                            }
3124                        }
3125                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
3126                        result_buffers[result_buffers_idx++] = *(j->buffer);
3127                        free(j->buffer);
3128                        j->buffer = NULL;
3129                    }
3130                }
3131                result.output_buffers = result_buffers;
3132                mCallbackOps->process_capture_result(mCallbackOps, &result);
3133                LOGD("meta frame_number = %u, capture_time = %lld",
3134                        result.frame_number, i->timestamp);
3135                free_camera_metadata((camera_metadata_t *)result.result);
3136                delete[] result_buffers;
3137            }else {
3138                LOGE("Fatal error: out of memory");
3139            }
3140        } else {
3141            mCallbackOps->process_capture_result(mCallbackOps, &result);
3142            LOGD("meta frame_number = %u, capture_time = %lld",
3143                    result.frame_number, i->timestamp);
3144            free_camera_metadata((camera_metadata_t *)result.result);
3145        }
3146
3147        i = erasePendingRequest(i);
3148
3149        if (!mPendingReprocessResultList.empty()) {
3150            handlePendingReprocResults(frame_number + 1);
3151        }
3152    }
3153
3154done_metadata:
3155    for (pendingRequestIterator i = mPendingRequestsList.begin();
3156            i != mPendingRequestsList.end() ;i++) {
3157        i->pipeline_depth++;
3158    }
3159    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3160    unblockRequestIfNecessary();
3161}
3162
3163/*===========================================================================
3164 * FUNCTION   : hdrPlusPerfLock
3165 *
3166 * DESCRIPTION: perf lock for HDR+ using custom intent
3167 *
3168 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3169 *
3170 * RETURN     : None
3171 *
3172 *==========================================================================*/
3173void QCamera3HardwareInterface::hdrPlusPerfLock(
3174        mm_camera_super_buf_t *metadata_buf)
3175{
3176    if (NULL == metadata_buf) {
3177        LOGE("metadata_buf is NULL");
3178        return;
3179    }
3180    metadata_buffer_t *metadata =
3181            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3182    int32_t *p_frame_number_valid =
3183            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3184    uint32_t *p_frame_number =
3185            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3186
3187    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3188        LOGE("%s: Invalid metadata", __func__);
3189        return;
3190    }
3191
3192    //acquire perf lock for 5 sec after the last HDR frame is captured
3193    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3194        if ((p_frame_number != NULL) &&
3195                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3196            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3197        }
3198    }
3199
3200    //release lock after perf lock timer is expired. If lock is already released,
3201    //isTimerReset returns false
3202    if (m_perfLock.isTimerReset()) {
3203        mLastCustIntentFrmNum = -1;
3204        m_perfLock.lock_rel_timed();
3205    }
3206}
3207
3208/*===========================================================================
3209 * FUNCTION   : handleInputBufferWithLock
3210 *
3211 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3212 *
3213 * PARAMETERS : @frame_number: frame number of the input buffer
3214 *
3215 * RETURN     :
3216 *
3217 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request whose frame number matches.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notification first (framework expects notify()
        // before process_capture_result() for a given frame), but only once.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Default to the current monotonic time; prefer the sensor
            // timestamp carried in the input request's settings when present.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on (and close) the input buffer's release fence before handing
        // the result back to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Return the input request's settings as the (final) result metadata
        // together with the input buffer itself; no output buffers here.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Done with this request; remove it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3275
3276/*===========================================================================
3277 * FUNCTION   : handleBufferWithLock
3278 *
3279 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3280 *
3281 * PARAMETERS : @buffer: image buffer for the callback
3282 *              @frame_number: frame number of the image buffer
3283 *
3284 * RETURN     :
3285 *
3286 *==========================================================================*/
3287void QCamera3HardwareInterface::handleBufferWithLock(
3288    camera3_stream_buffer_t *buffer, uint32_t frame_number)
3289{
3290    ATRACE_CALL();
3291    /* Nothing to be done during error state */
3292    if ((ERROR == mState) || (DEINIT == mState)) {
3293        return;
3294    }
3295    if (mFlushPerf) {
3296        handleBuffersDuringFlushLock(buffer);
3297        return;
3298    }
3299    //not in flush
3300    // If the frame number doesn't exist in the pending request list,
3301    // directly send the buffer to the frameworks, and update pending buffers map
3302    // Otherwise, book-keep the buffer.
3303    pendingRequestIterator i = mPendingRequestsList.begin();
3304    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3305        i++;
3306    }
3307    if (i == mPendingRequestsList.end()) {
3308        // Verify all pending requests frame_numbers are greater
3309        for (pendingRequestIterator j = mPendingRequestsList.begin();
3310                j != mPendingRequestsList.end(); j++) {
3311            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3312                LOGW("Error: pending live frame number %d is smaller than %d",
3313                         j->frame_number, frame_number);
3314            }
3315        }
3316        camera3_capture_result_t result;
3317        memset(&result, 0, sizeof(camera3_capture_result_t));
3318        result.result = NULL;
3319        result.frame_number = frame_number;
3320        result.num_output_buffers = 1;
3321        result.partial_result = 0;
3322        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3323                m != mPendingFrameDropList.end(); m++) {
3324            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3325            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3326            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3327                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3328                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3329                         frame_number, streamID);
3330                m = mPendingFrameDropList.erase(m);
3331                break;
3332            }
3333        }
3334        result.output_buffers = buffer;
3335        LOGH("result frame_number = %d, buffer = %p",
3336                 frame_number, buffer->buffer);
3337
3338        mPendingBuffersMap.removeBuf(buffer->buffer);
3339
3340        mCallbackOps->process_capture_result(mCallbackOps, &result);
3341    } else {
3342        if (i->input_buffer) {
3343            CameraMetadata settings;
3344            camera3_notify_msg_t notify_msg;
3345            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3346            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3347            if(i->settings) {
3348                settings = i->settings;
3349                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3350                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3351                } else {
3352                    LOGW("No timestamp in input settings! Using current one.");
3353                }
3354            } else {
3355                LOGE("Input settings missing!");
3356            }
3357
3358            notify_msg.type = CAMERA3_MSG_SHUTTER;
3359            notify_msg.message.shutter.frame_number = frame_number;
3360            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3361
3362            if (i->input_buffer->release_fence != -1) {
3363               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3364               close(i->input_buffer->release_fence);
3365               if (rc != OK) {
3366                   LOGE("input buffer sync wait failed %d", rc);
3367               }
3368            }
3369            mPendingBuffersMap.removeBuf(buffer->buffer);
3370
3371            bool notifyNow = true;
3372            for (pendingRequestIterator j = mPendingRequestsList.begin();
3373                    j != mPendingRequestsList.end(); j++) {
3374                if (j->frame_number < frame_number) {
3375                    notifyNow = false;
3376                    break;
3377                }
3378            }
3379
3380            if (notifyNow) {
3381                camera3_capture_result result;
3382                memset(&result, 0, sizeof(camera3_capture_result));
3383                result.frame_number = frame_number;
3384                result.result = i->settings;
3385                result.input_buffer = i->input_buffer;
3386                result.num_output_buffers = 1;
3387                result.output_buffers = buffer;
3388                result.partial_result = PARTIAL_RESULT_COUNT;
3389
3390                mCallbackOps->notify(mCallbackOps, &notify_msg);
3391                mCallbackOps->process_capture_result(mCallbackOps, &result);
3392                LOGD("Notify reprocess now %d!", frame_number);
3393                i = erasePendingRequest(i);
3394            } else {
3395                // Cache reprocess result for later
3396                PendingReprocessResult pendingResult;
3397                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3398                pendingResult.notify_msg = notify_msg;
3399                pendingResult.buffer = *buffer;
3400                pendingResult.frame_number = frame_number;
3401                mPendingReprocessResultList.push_back(pendingResult);
3402                LOGD("Cache reprocess result %d!", frame_number);
3403            }
3404        } else {
3405            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3406                j != i->buffers.end(); j++) {
3407                if (j->stream == buffer->stream) {
3408                    if (j->buffer != NULL) {
3409                        LOGE("Error: buffer is already set");
3410                    } else {
3411                        j->buffer = (camera3_stream_buffer_t *)malloc(
3412                            sizeof(camera3_stream_buffer_t));
3413                        *(j->buffer) = *buffer;
3414                        LOGH("cache buffer %p at result frame_number %u",
3415                             buffer->buffer, frame_number);
3416                    }
3417                }
3418            }
3419        }
3420    }
3421}
3422
3423/*===========================================================================
3424 * FUNCTION   : unblockRequestIfNecessary
3425 *
3426 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3427 *              that mMutex is held when this function is called.
3428 *
3429 * PARAMETERS :
3430 *
3431 * RETURN     :
3432 *
3433 *==========================================================================*/
3434void QCamera3HardwareInterface::unblockRequestIfNecessary()
3435{
3436   // Unblock process_capture_request
3437   pthread_cond_signal(&mRequestCond);
3438}
3439
3440
3441/*===========================================================================
3442 * FUNCTION   : processCaptureRequest
3443 *
3444 * DESCRIPTION: process a capture request from camera service
3445 *
3446 * PARAMETERS :
3447 *   @request : request from framework to process
3448 *
3449 * RETURN     :
3450 *
3451 *==========================================================================*/
3452int QCamera3HardwareInterface::processCaptureRequest(
3453                    camera3_capture_request_t *request)
3454{
3455    ATRACE_CALL();
3456    int rc = NO_ERROR;
3457    int32_t request_id;
3458    CameraMetadata meta;
3459    bool isVidBufRequested = false;
3460    camera3_stream_buffer_t *pInputBuffer = NULL;
3461
3462    pthread_mutex_lock(&mMutex);
3463
3464    // Validate current state
3465    switch (mState) {
3466        case CONFIGURED:
3467        case STARTED:
3468            /* valid state */
3469            break;
3470
3471        case ERROR:
3472            pthread_mutex_unlock(&mMutex);
3473            handleCameraDeviceError();
3474            return -ENODEV;
3475
3476        default:
3477            LOGE("Invalid state %d", mState);
3478            pthread_mutex_unlock(&mMutex);
3479            return -ENODEV;
3480    }
3481
3482    rc = validateCaptureRequest(request);
3483    if (rc != NO_ERROR) {
3484        LOGE("incoming request is not valid");
3485        pthread_mutex_unlock(&mMutex);
3486        return rc;
3487    }
3488
3489    meta = request->settings;
3490
3491    // For first capture request, send capture intent, and
3492    // stream on all streams
3493    if (mState == CONFIGURED) {
3494        // send an unconfigure to the backend so that the isp
3495        // resources are deallocated
3496        if (!mFirstConfiguration) {
3497            cam_stream_size_info_t stream_config_info;
3498            int32_t hal_version = CAM_HAL_V3;
3499            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3500            stream_config_info.buffer_info.min_buffers =
3501                    MIN_INFLIGHT_REQUESTS;
3502            stream_config_info.buffer_info.max_buffers =
3503                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3504            clear_metadata_buffer(mParameters);
3505            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3506                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3507            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3508                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3509            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3510                    mParameters);
3511            if (rc < 0) {
3512                LOGE("set_parms for unconfigure failed");
3513                pthread_mutex_unlock(&mMutex);
3514                return rc;
3515            }
3516        }
3517        m_perfLock.lock_acq();
3518        /* get eis information for stream configuration */
3519        cam_is_type_t is_type;
3520        char is_type_value[PROPERTY_VALUE_MAX];
3521        property_get("persist.camera.is_type", is_type_value, "0");
3522        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3523
3524        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3525            int32_t hal_version = CAM_HAL_V3;
3526            uint8_t captureIntent =
3527                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3528            mCaptureIntent = captureIntent;
3529            clear_metadata_buffer(mParameters);
3530            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3531            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3532        }
3533
3534        //If EIS is enabled, turn it on for video
3535        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3536        int32_t vsMode;
3537        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3538        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3539            rc = BAD_VALUE;
3540        }
3541
3542        //IS type will be 0 unless EIS is supported. If EIS is supported
3543        //it could either be 1 or 4 depending on the stream and video size
3544        if (setEis) {
3545            if (!m_bEisSupportedSize) {
3546                is_type = IS_TYPE_DIS;
3547            } else {
3548                is_type = IS_TYPE_EIS_2_0;
3549            }
3550            mStreamConfigInfo.is_type = is_type;
3551        } else {
3552            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3553        }
3554
3555        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3556                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3557        int32_t tintless_value = 1;
3558        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3559                CAM_INTF_PARM_TINTLESS, tintless_value);
3560        //Disable CDS for HFR mode or if DIS/EIS is on.
3561        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3562        //after every configure_stream
3563        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3564                (m_bIsVideo)) {
3565            int32_t cds = CAM_CDS_MODE_OFF;
3566            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3567                    CAM_INTF_PARM_CDS_MODE, cds))
3568                LOGE("Failed to disable CDS for HFR mode");
3569
3570        }
3571
3572        if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3573            uint8_t* use_av_timer = NULL;
3574
3575            if (m_debug_avtimer){
3576                use_av_timer = &m_debug_avtimer;
3577            }
3578            else{
3579                use_av_timer =
3580                    meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3581            }
3582
3583            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3584                rc = BAD_VALUE;
3585            }
3586        }
3587
3588        setMobicat();
3589
3590        /* Set fps and hfr mode while sending meta stream info so that sensor
3591         * can configure appropriate streaming mode */
3592        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3593        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3594        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3595        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3596            rc = setHalFpsRange(meta, mParameters);
3597            if (rc == NO_ERROR) {
3598                int32_t max_fps =
3599                    (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3600                if (mBatchSize) {
3601                    /* For HFR, more buffers are dequeued upfront to improve the performance */
3602                    mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3603                    mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3604                } else if (max_fps == 60) {
                    /* for 60 fps usecase increase inflight requests */
3606                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3607                    mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
3608                } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3609                    /* for non 60 fps video use cases, set min = max inflight requests to
3610                    avoid frame drops due to degraded system performance */
3611                    mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
3612                }
3613            }
3614            else {
3615                LOGE("setHalFpsRange failed");
3616            }
3617        }
3618        if (meta.exists(ANDROID_CONTROL_MODE)) {
3619            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3620            rc = extractSceneMode(meta, metaMode, mParameters);
3621            if (rc != NO_ERROR) {
3622                LOGE("extractSceneMode failed");
3623            }
3624        }
3625        memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3626
3627
3628        //TODO: validate the arguments, HSV scenemode should have only the
3629        //advertised fps ranges
3630
        /*set the capture intent, hal version, tintless, stream info,
         *and disable parameters to the backend*/
3633        LOGD("set_parms META_STREAM_INFO " );
3634        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3635            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3636                    "Format:%d",
3637                    mStreamConfigInfo.type[i],
3638                    mStreamConfigInfo.stream_sizes[i].width,
3639                    mStreamConfigInfo.stream_sizes[i].height,
3640                    mStreamConfigInfo.postprocess_mask[i],
3641                    mStreamConfigInfo.format[i]);
3642        }
3643
3644        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3645                    mParameters);
3646        if (rc < 0) {
3647            LOGE("set_parms failed for hal version, stream info");
3648        }
3649
3650        cam_dimension_t sensor_dim;
3651        memset(&sensor_dim, 0, sizeof(sensor_dim));
3652        rc = getSensorOutputSize(sensor_dim);
3653        if (rc != NO_ERROR) {
3654            LOGE("Failed to get sensor output size");
3655            pthread_mutex_unlock(&mMutex);
3656            goto error_exit;
3657        }
3658
3659        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3660                gCamCapability[mCameraId]->active_array_size.height,
3661                sensor_dim.width, sensor_dim.height);
3662
3663        /* Set batchmode before initializing channel. Since registerBuffer
3664         * internally initializes some of the channels, better set batchmode
3665         * even before first register buffer */
3666        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3667            it != mStreamInfo.end(); it++) {
3668            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3669            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3670                    && mBatchSize) {
3671                rc = channel->setBatchSize(mBatchSize);
3672                //Disable per frame map unmap for HFR/batchmode case
3673                rc |= channel->setPerFrameMapUnmap(false);
3674                if (NO_ERROR != rc) {
3675                    LOGE("Channel init failed %d", rc);
3676                    pthread_mutex_unlock(&mMutex);
3677                    goto error_exit;
3678                }
3679            }
3680        }
3681
3682        //First initialize all streams
3683        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3684            it != mStreamInfo.end(); it++) {
3685            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3686            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3687               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3688               setEis)
3689                rc = channel->initialize(is_type);
3690            else {
3691                rc = channel->initialize(IS_TYPE_NONE);
3692            }
3693            if (NO_ERROR != rc) {
3694                LOGE("Channel initialization failed %d", rc);
3695                pthread_mutex_unlock(&mMutex);
3696                goto error_exit;
3697            }
3698        }
3699
3700        if (mRawDumpChannel) {
3701            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3702            if (rc != NO_ERROR) {
3703                LOGE("Error: Raw Dump Channel init failed");
3704                pthread_mutex_unlock(&mMutex);
3705                goto error_exit;
3706            }
3707        }
3708        if (mSupportChannel) {
3709            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3710            if (rc < 0) {
3711                LOGE("Support channel initialization failed");
3712                pthread_mutex_unlock(&mMutex);
3713                goto error_exit;
3714            }
3715        }
3716        if (mAnalysisChannel) {
3717            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3718            if (rc < 0) {
3719                LOGE("Analysis channel initialization failed");
3720                pthread_mutex_unlock(&mMutex);
3721                goto error_exit;
3722            }
3723        }
3724        if (mDummyBatchChannel) {
3725            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3726            if (rc < 0) {
3727                LOGE("mDummyBatchChannel setBatchSize failed");
3728                pthread_mutex_unlock(&mMutex);
3729                goto error_exit;
3730            }
3731            rc = mDummyBatchChannel->initialize(is_type);
3732            if (rc < 0) {
3733                LOGE("mDummyBatchChannel initialization failed");
3734                pthread_mutex_unlock(&mMutex);
3735                goto error_exit;
3736            }
3737        }
3738
3739        // Set bundle info
3740        rc = setBundleInfo();
3741        if (rc < 0) {
3742            LOGE("setBundleInfo failed %d", rc);
3743            pthread_mutex_unlock(&mMutex);
3744            goto error_exit;
3745        }
3746
3747        //update settings from app here
3748        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3749            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3750            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3751        }
3752        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3753            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3754            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3755        }
3756        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3757            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3758            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3759
3760            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3761                (mLinkedCameraId != mCameraId) ) {
3762                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3763                    mLinkedCameraId, mCameraId);
3764                goto error_exit;
3765            }
3766        }
3767
3768        // add bundle related cameras
3769        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3770        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3771            if (mIsDeviceLinked)
3772                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3773            else
3774                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3775
3776            pthread_mutex_lock(&gCamLock);
3777
3778            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3779                LOGE("Dualcam: Invalid Session Id ");
3780                pthread_mutex_unlock(&gCamLock);
3781                goto error_exit;
3782            }
3783
3784            if (mIsMainCamera == 1) {
3785                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3786                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3787                // related session id should be session id of linked session
3788                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3789            } else {
3790                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3791                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3792                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3793            }
3794            pthread_mutex_unlock(&gCamLock);
3795
3796            rc = mCameraHandle->ops->sync_related_sensors(
3797                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3798            if (rc < 0) {
3799                LOGE("Dualcam: link failed");
3800                goto error_exit;
3801            }
3802        }
3803
3804        //Then start them.
3805        LOGH("Start META Channel");
3806        rc = mMetadataChannel->start();
3807        if (rc < 0) {
3808            LOGE("META channel start failed");
3809            pthread_mutex_unlock(&mMutex);
3810            goto error_exit;
3811        }
3812
3813        if (mAnalysisChannel) {
3814            rc = mAnalysisChannel->start();
3815            if (rc < 0) {
3816                LOGE("Analysis channel start failed");
3817                mMetadataChannel->stop();
3818                pthread_mutex_unlock(&mMutex);
3819                goto error_exit;
3820            }
3821        }
3822
3823        if (mSupportChannel) {
3824            rc = mSupportChannel->start();
3825            if (rc < 0) {
3826                LOGE("Support channel start failed");
3827                mMetadataChannel->stop();
                /* Although support and analysis are mutually exclusive today
                   adding it in any case for future proofing */
3830                if (mAnalysisChannel) {
3831                    mAnalysisChannel->stop();
3832                }
3833                pthread_mutex_unlock(&mMutex);
3834                goto error_exit;
3835            }
3836        }
3837        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3838            it != mStreamInfo.end(); it++) {
3839            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3840            LOGH("Start Processing Channel mask=%d",
3841                     channel->getStreamTypeMask());
3842            rc = channel->start();
3843            if (rc < 0) {
3844                LOGE("channel start failed");
3845                pthread_mutex_unlock(&mMutex);
3846                goto error_exit;
3847            }
3848        }
3849
3850        if (mRawDumpChannel) {
3851            LOGD("Starting raw dump stream");
3852            rc = mRawDumpChannel->start();
3853            if (rc != NO_ERROR) {
3854                LOGE("Error Starting Raw Dump Channel");
3855                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3856                      it != mStreamInfo.end(); it++) {
3857                    QCamera3Channel *channel =
3858                        (QCamera3Channel *)(*it)->stream->priv;
3859                    LOGH("Stopping Processing Channel mask=%d",
3860                        channel->getStreamTypeMask());
3861                    channel->stop();
3862                }
3863                if (mSupportChannel)
3864                    mSupportChannel->stop();
3865                if (mAnalysisChannel) {
3866                    mAnalysisChannel->stop();
3867                }
3868                mMetadataChannel->stop();
3869                pthread_mutex_unlock(&mMutex);
3870                goto error_exit;
3871            }
3872        }
3873
3874        if (mChannelHandle) {
3875
3876            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3877                    mChannelHandle);
3878            if (rc != NO_ERROR) {
3879                LOGE("start_channel failed %d", rc);
3880                pthread_mutex_unlock(&mMutex);
3881                goto error_exit;
3882            }
3883        }
3884
3885        goto no_error;
3886error_exit:
3887        m_perfLock.lock_rel();
3888        return rc;
3889no_error:
3890        m_perfLock.lock_rel();
3891
3892        mWokenUpByDaemon = false;
3893        mPendingLiveRequest = 0;
3894        mFirstConfiguration = false;
3895        enablePowerHint();
3896    }
3897
3898    uint32_t frameNumber = request->frame_number;
3899    cam_stream_ID_t streamsArray;
3900
3901    if (mFlushPerf) {
3902        //we cannot accept any requests during flush
3903        LOGE("process_capture_request cannot proceed during flush");
3904        pthread_mutex_unlock(&mMutex);
3905        return NO_ERROR; //should return an error
3906    }
3907
3908    if (meta.exists(ANDROID_REQUEST_ID)) {
3909        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3910        mCurrentRequestId = request_id;
3911        LOGD("Received request with id: %d", request_id);
3912    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3913        LOGE("Unable to find request id field, \
3914                & no previous id available");
3915        pthread_mutex_unlock(&mMutex);
3916        return NAME_NOT_FOUND;
3917    } else {
3918        LOGD("Re-using old request id");
3919        request_id = mCurrentRequestId;
3920    }
3921
3922    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3923                                    request->num_output_buffers,
3924                                    request->input_buffer,
3925                                    frameNumber);
3926    // Acquire all request buffers first
3927    streamsArray.num_streams = 0;
3928    int blob_request = 0;
3929    uint32_t snapshotStreamId = 0;
3930    for (size_t i = 0; i < request->num_output_buffers; i++) {
3931        const camera3_stream_buffer_t& output = request->output_buffers[i];
3932        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3933
3934        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3935            //Call function to store local copy of jpeg data for encode params.
3936            blob_request = 1;
3937            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3938        }
3939
3940        if (output.acquire_fence != -1) {
3941           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3942           close(output.acquire_fence);
3943           if (rc != OK) {
3944              LOGE("sync wait failed %d", rc);
3945              pthread_mutex_unlock(&mMutex);
3946              return rc;
3947           }
3948        }
3949
3950        streamsArray.stream_request[streamsArray.num_streams++].streamID =
3951            channel->getStreamID(channel->getStreamTypeMask());
3952
3953        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3954            isVidBufRequested = true;
3955        }
3956    }
3957
3958    if (blob_request) {
3959        KPI_ATRACE_INT("SNAPSHOT", 1);
3960    }
3961    if (blob_request && mRawDumpChannel) {
3962        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3963        streamsArray.stream_request[streamsArray.num_streams].streamID =
3964            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3965        streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
3966    }
3967
3968    if(request->input_buffer == NULL) {
3969        /* Parse the settings:
3970         * - For every request in NORMAL MODE
3971         * - For every request in HFR mode during preview only case
3972         * - For first request of every batch in HFR mode during video
3973         * recording. In batchmode the same settings except frame number is
3974         * repeated in each request of the batch.
3975         */
3976        if (!mBatchSize ||
3977           (mBatchSize && !isVidBufRequested) ||
3978           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3979            rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
3980            if (rc < 0) {
3981                LOGE("fail to set frame parameters");
3982                pthread_mutex_unlock(&mMutex);
3983                return rc;
3984            }
3985        }
3986        /* For batchMode HFR, setFrameParameters is not called for every
3987         * request. But only frame number of the latest request is parsed.
3988         * Keep track of first and last frame numbers in a batch so that
3989         * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
3991        if (mBatchSize) {
3992            if (!mToBeQueuedVidBufs) {
3993                //start of the batch
3994                mFirstFrameNumberInBatch = request->frame_number;
3995            }
3996            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3997                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3998                LOGE("Failed to set the frame number in the parameters");
3999                return BAD_VALUE;
4000            }
4001        }
4002        if (mNeedSensorRestart) {
4003            /* Unlock the mutex as restartSensor waits on the channels to be
4004             * stopped, which in turn calls stream callback functions -
4005             * handleBufferWithLock and handleMetadataWithLock */
4006            pthread_mutex_unlock(&mMutex);
4007            rc = dynamicUpdateMetaStreamInfo();
4008            if (rc != NO_ERROR) {
4009                LOGE("Restarting the sensor failed");
4010                return BAD_VALUE;
4011            }
4012            mNeedSensorRestart = false;
4013            pthread_mutex_lock(&mMutex);
4014        }
4015    } else {
4016
4017        if (request->input_buffer->acquire_fence != -1) {
4018           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4019           close(request->input_buffer->acquire_fence);
4020           if (rc != OK) {
4021              LOGE("input buffer sync wait failed %d", rc);
4022              pthread_mutex_unlock(&mMutex);
4023              return rc;
4024           }
4025        }
4026    }
4027
4028    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4029        mLastCustIntentFrmNum = frameNumber;
4030    }
4031    /* Update pending request list and pending buffers map */
4032    PendingRequestInfo pendingRequest;
4033    pendingRequestIterator latestRequest;
4034    pendingRequest.frame_number = frameNumber;
4035    pendingRequest.num_buffers = request->num_output_buffers;
4036    pendingRequest.request_id = request_id;
4037    pendingRequest.blob_request = blob_request;
4038    pendingRequest.timestamp = 0;
4039    pendingRequest.bUrgentReceived = 0;
4040    if (request->input_buffer) {
4041        pendingRequest.input_buffer =
4042                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4043        *(pendingRequest.input_buffer) = *(request->input_buffer);
4044        pInputBuffer = pendingRequest.input_buffer;
4045    } else {
4046       pendingRequest.input_buffer = NULL;
4047       pInputBuffer = NULL;
4048    }
4049
4050    pendingRequest.pipeline_depth = 0;
4051    pendingRequest.partial_result_cnt = 0;
4052    extractJpegMetadata(mCurJpegMeta, request);
4053    pendingRequest.jpegMetadata = mCurJpegMeta;
4054    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4055    pendingRequest.shutter_notified = false;
4056
4057    //extract capture intent
4058    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4059        mCaptureIntent =
4060                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4061    }
4062    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4063        mHybridAeEnable =
4064                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4065    }
4066    pendingRequest.capture_intent = mCaptureIntent;
4067    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4068    /* DevCamDebug metadata processCaptureRequest */
4069    if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4070        mDevCamDebugMetaEnable =
4071                meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4072    }
4073    pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4074    /* DevCamDebug metadata end */
4075
4076    //extract CAC info
4077    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4078        mCacMode =
4079                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4080    }
4081    pendingRequest.fwkCacMode = mCacMode;
4082
4083    PendingBuffersInRequest bufsForCurRequest;
4084    bufsForCurRequest.frame_number = frameNumber;
4085    // Mark current timestamp for the new request
4086    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4087
4088    for (size_t i = 0; i < request->num_output_buffers; i++) {
4089        RequestedBufferInfo requestedBuf;
4090        memset(&requestedBuf, 0, sizeof(requestedBuf));
4091        requestedBuf.stream = request->output_buffers[i].stream;
4092        requestedBuf.buffer = NULL;
4093        pendingRequest.buffers.push_back(requestedBuf);
4094
4095        // Add to buffer handle the pending buffers list
4096        PendingBufferInfo bufferInfo;
4097        bufferInfo.buffer = request->output_buffers[i].buffer;
4098        bufferInfo.stream = request->output_buffers[i].stream;
4099        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4100        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4101        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4102            frameNumber, bufferInfo.buffer,
4103            channel->getStreamTypeMask(), bufferInfo.stream->format);
4104    }
4105    // Add this request packet into mPendingBuffersMap
4106    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4107    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4108        mPendingBuffersMap.get_num_overall_buffers());
4109
4110    latestRequest = mPendingRequestsList.insert(
4111            mPendingRequestsList.end(), pendingRequest);
4112    if(mFlush) {
4113        LOGI("mFlush is true");
4114        pthread_mutex_unlock(&mMutex);
4115        return NO_ERROR;
4116    }
4117
4118    int indexUsed;
4119    // Notify metadata channel we receive a request
4120    mMetadataChannel->request(NULL, frameNumber, indexUsed);
4121
4122    if(request->input_buffer != NULL){
4123        LOGD("Input request, frame_number %d", frameNumber);
4124        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4125        if (NO_ERROR != rc) {
4126            LOGE("fail to set reproc parameters");
4127            pthread_mutex_unlock(&mMutex);
4128            return rc;
4129        }
4130    }
4131
4132    // Call request on other streams
4133    uint32_t streams_need_metadata = 0;
4134    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4135    for (size_t i = 0; i < request->num_output_buffers; i++) {
4136        const camera3_stream_buffer_t& output = request->output_buffers[i];
4137        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4138
4139        if (channel == NULL) {
4140            LOGW("invalid channel pointer for stream");
4141            continue;
4142        }
4143
4144        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4145            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4146                      output.buffer, request->input_buffer, frameNumber);
4147            if(request->input_buffer != NULL){
4148                rc = channel->request(output.buffer, frameNumber,
4149                        pInputBuffer, &mReprocMeta, indexUsed);
4150                if (rc < 0) {
4151                    LOGE("Fail to request on picture channel");
4152                    pthread_mutex_unlock(&mMutex);
4153                    return rc;
4154                }
4155            } else {
4156                LOGD("snapshot request with buffer %p, frame_number %d",
4157                         output.buffer, frameNumber);
4158                if (!request->settings) {
4159                    rc = channel->request(output.buffer, frameNumber,
4160                            NULL, mPrevParameters, indexUsed);
4161                } else {
4162                    rc = channel->request(output.buffer, frameNumber,
4163                            NULL, mParameters, indexUsed);
4164                }
4165                if (rc < 0) {
4166                    LOGE("Fail to request on picture channel");
4167                    pthread_mutex_unlock(&mMutex);
4168                    return rc;
4169                }
4170
4171                uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4172                uint32_t j = 0;
4173                for (j = 0; j < streamsArray.num_streams; j++) {
4174                    if (streamsArray.stream_request[j].streamID == streamId) {
4175                      if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4176                          streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4177                      else
4178                          streamsArray.stream_request[j].buf_index = indexUsed;
4179                        break;
4180                    }
4181                }
4182                if (j == streamsArray.num_streams) {
4183                    LOGE("Did not find matching stream to update index");
4184                    assert(0);
4185                }
4186
4187                pendingBufferIter->need_metadata = true;
4188                streams_need_metadata++;
4189            }
4190        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4191            bool needMetadata = false;
4192            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4193            rc = yuvChannel->request(output.buffer, frameNumber,
4194                    pInputBuffer,
4195                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4196            if (rc < 0) {
4197                LOGE("Fail to request on YUV channel");
4198                pthread_mutex_unlock(&mMutex);
4199                return rc;
4200            }
4201
4202            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4203            uint32_t j = 0;
4204            for (j = 0; j < streamsArray.num_streams; j++) {
4205                if (streamsArray.stream_request[j].streamID == streamId) {
4206                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4207                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4208                    else
4209                        streamsArray.stream_request[j].buf_index = indexUsed;
4210                    break;
4211                }
4212            }
4213            if (j == streamsArray.num_streams) {
4214                LOGE("Did not find matching stream to update index");
4215                assert(0);
4216            }
4217
4218            pendingBufferIter->need_metadata = needMetadata;
4219            if (needMetadata)
4220                streams_need_metadata += 1;
4221            LOGD("calling YUV channel request, need_metadata is %d",
4222                     needMetadata);
4223        } else {
4224            LOGD("request with buffer %p, frame_number %d",
4225                  output.buffer, frameNumber);
4226
4227            rc = channel->request(output.buffer, frameNumber, indexUsed);
4228
4229            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4230            uint32_t j = 0;
4231            for (j = 0; j < streamsArray.num_streams; j++) {
4232                if (streamsArray.stream_request[j].streamID == streamId) {
4233                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4234                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4235                    else
4236                        streamsArray.stream_request[j].buf_index = indexUsed;
4237                    break;
4238                }
4239            }
4240            if (j == streamsArray.num_streams) {
4241                LOGE("Did not find matching stream to update index");
4242                assert(0);
4243            }
4244
4245            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4246                    && mBatchSize) {
4247                mToBeQueuedVidBufs++;
4248                if (mToBeQueuedVidBufs == mBatchSize) {
4249                    channel->queueBatchBuf();
4250                }
4251            }
4252            if (rc < 0) {
4253                LOGE("request failed");
4254                pthread_mutex_unlock(&mMutex);
4255                return rc;
4256            }
4257        }
4258        pendingBufferIter++;
4259    }
4260
4261    //If 2 streams have need_metadata set to true, fail the request, unless
4262    //we copy/reference count the metadata buffer
4263    if (streams_need_metadata > 1) {
4264        LOGE("not supporting request in which two streams requires"
4265                " 2 HAL metadata for reprocessing");
4266        pthread_mutex_unlock(&mMutex);
4267        return -EINVAL;
4268    }
4269
4270    if (request->input_buffer == NULL) {
4271        /* Set the parameters to backend:
4272         * - For every request in NORMAL MODE
4273         * - For every request in HFR mode during preview only case
4274         * - Once every batch in HFR mode during video recording
4275         */
4276        if (!mBatchSize ||
4277           (mBatchSize && !isVidBufRequested) ||
4278           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4279            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4280                     mBatchSize, isVidBufRequested,
4281                    mToBeQueuedVidBufs);
4282
4283            if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4284                for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4285                    uint32_t m = 0;
4286                    for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4287                        if (streamsArray.stream_request[k].streamID ==
4288                                mBatchedStreamsArray.stream_request[m].streamID)
4289                            break;
4290                        }
4291                        if (m == mBatchedStreamsArray.num_streams) {
4292                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4293                                streamsArray.stream_request[k].streamID;
4294                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4295                                streamsArray.stream_request[k].buf_index;
4296                            mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4297                        }
4298                }
4299                streamsArray = mBatchedStreamsArray;
4300            }
4301            /* Update stream id of all the requested buffers */
4302            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4303                LOGE("Failed to set stream type mask in the parameters");
4304                return BAD_VALUE;
4305            }
4306
4307            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4308                    mParameters);
4309            if (rc < 0) {
4310                LOGE("set_parms failed");
4311            }
4312            /* reset to zero coz, the batch is queued */
4313            mToBeQueuedVidBufs = 0;
4314            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4315            memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4316        } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4317            for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4318                uint32_t m = 0;
4319                for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4320                    if (streamsArray.stream_request[k].streamID ==
4321                            mBatchedStreamsArray.stream_request[m].streamID)
4322                        break;
4323                }
4324                if (m == mBatchedStreamsArray.num_streams) {
4325                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4326                        streamsArray.stream_request[k].streamID;
4327                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4328                        streamsArray.stream_request[k].buf_index;
4329                    mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4330                }
4331            }
4332        }
4333        mPendingLiveRequest++;
4334    }
4335
4336    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4337
4338    mState = STARTED;
4339    // Added a timed condition wait
4340    struct timespec ts;
4341    uint8_t isValidTimeout = 1;
4342    rc = clock_gettime(CLOCK_REALTIME, &ts);
4343    if (rc < 0) {
4344      isValidTimeout = 0;
4345      LOGE("Error reading the real time clock!!");
4346    }
4347    else {
4348      // Make timeout as 5 sec for request to be honored
4349      ts.tv_sec += 5;
4350    }
4351    //Block on conditional variable
4352    while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4353            (mState != ERROR) && (mState != DEINIT)) {
4354        if (!isValidTimeout) {
4355            LOGD("Blocking on conditional wait");
4356            pthread_cond_wait(&mRequestCond, &mMutex);
4357        }
4358        else {
4359            LOGD("Blocking on timed conditional wait");
4360            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4361            if (rc == ETIMEDOUT) {
4362                rc = -ENODEV;
4363                LOGE("Unblocked on timeout!!!!");
4364                break;
4365            }
4366        }
4367        LOGD("Unblocked");
4368        if (mWokenUpByDaemon) {
4369            mWokenUpByDaemon = false;
4370            if (mPendingLiveRequest < mMaxInFlightRequests)
4371                break;
4372        }
4373    }
4374    pthread_mutex_unlock(&mMutex);
4375
4376    return rc;
4377}
4378
4379/*===========================================================================
4380 * FUNCTION   : dump
4381 *
4382 * DESCRIPTION:
4383 *
4384 * PARAMETERS :
4385 *
4386 *
4387 * RETURN     :
4388 *==========================================================================*/
4389void QCamera3HardwareInterface::dump(int fd)
4390{
4391    pthread_mutex_lock(&mMutex);
4392    dprintf(fd, "\n Camera HAL3 information Begin \n");
4393
4394    dprintf(fd, "\nNumber of pending requests: %zu \n",
4395        mPendingRequestsList.size());
4396    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4397    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4398    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4399    for(pendingRequestIterator i = mPendingRequestsList.begin();
4400            i != mPendingRequestsList.end(); i++) {
4401        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4402        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4403        i->input_buffer);
4404    }
4405    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4406                mPendingBuffersMap.get_num_overall_buffers());
4407    dprintf(fd, "-------+------------------\n");
4408    dprintf(fd, " Frame | Stream type mask \n");
4409    dprintf(fd, "-------+------------------\n");
4410    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4411        for(auto &j : req.mPendingBufferList) {
4412            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4413            dprintf(fd, " %5d | %11d \n",
4414                    req.frame_number, channel->getStreamTypeMask());
4415        }
4416    }
4417    dprintf(fd, "-------+------------------\n");
4418
4419    dprintf(fd, "\nPending frame drop list: %zu\n",
4420        mPendingFrameDropList.size());
4421    dprintf(fd, "-------+-----------\n");
4422    dprintf(fd, " Frame | Stream ID \n");
4423    dprintf(fd, "-------+-----------\n");
4424    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4425        i != mPendingFrameDropList.end(); i++) {
4426        dprintf(fd, " %5d | %9d \n",
4427            i->frame_number, i->stream_ID);
4428    }
4429    dprintf(fd, "-------+-----------\n");
4430
4431    dprintf(fd, "\n Camera HAL3 information End \n");
4432
4433    /* use dumpsys media.camera as trigger to send update debug level event */
4434    mUpdateDebugLevel = true;
4435    pthread_mutex_unlock(&mMutex);
4436    return;
4437}
4438
4439/*===========================================================================
4440 * FUNCTION   : flush
4441 *
4442 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4443 *              conditionally restarts channels
4444 *
4445 * PARAMETERS :
4446 *  @ restartChannels: re-start all channels
4447 *
4448 *
4449 * RETURN     :
4450 *          0 on success
4451 *          Error code on failure
4452 *==========================================================================*/
4453int QCamera3HardwareInterface::flush(bool restartChannels)
4454{
4455    KPI_ATRACE_CALL();
4456    int32_t rc = NO_ERROR;
4457
4458    LOGD("Unblocking Process Capture Request");
4459    pthread_mutex_lock(&mMutex);
4460    mFlush = true;
4461    pthread_mutex_unlock(&mMutex);
4462
4463    rc = stopAllChannels();
4464    // unlink of dualcam
4465    if (mIsDeviceLinked) {
4466        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4467        pthread_mutex_lock(&gCamLock);
4468
4469        if (mIsMainCamera == 1) {
4470            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4471            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4472            // related session id should be session id of linked session
4473            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4474        } else {
4475            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4476            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4477            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4478        }
4479        pthread_mutex_unlock(&gCamLock);
4480
4481        rc = mCameraHandle->ops->sync_related_sensors(
4482                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4483        if (rc < 0) {
4484            LOGE("Dualcam: Unlink failed, but still proceed to close");
4485        }
4486    }
4487
4488    if (rc < 0) {
4489        LOGE("stopAllChannels failed");
4490        return rc;
4491    }
4492    if (mChannelHandle) {
4493        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4494                mChannelHandle);
4495    }
4496
4497    // Reset bundle info
4498    rc = setBundleInfo();
4499    if (rc < 0) {
4500        LOGE("setBundleInfo failed %d", rc);
4501        return rc;
4502    }
4503
4504    // Mutex Lock
4505    pthread_mutex_lock(&mMutex);
4506
4507    // Unblock process_capture_request
4508    mPendingLiveRequest = 0;
4509    pthread_cond_signal(&mRequestCond);
4510
4511    rc = notifyErrorForPendingRequests();
4512    if (rc < 0) {
4513        LOGE("notifyErrorForPendingRequests failed");
4514        pthread_mutex_unlock(&mMutex);
4515        return rc;
4516    }
4517
4518    mFlush = false;
4519
4520    // Start the Streams/Channels
4521    if (restartChannels) {
4522        rc = startAllChannels();
4523        if (rc < 0) {
4524            LOGE("startAllChannels failed");
4525            pthread_mutex_unlock(&mMutex);
4526            return rc;
4527        }
4528    }
4529
4530    if (mChannelHandle) {
4531        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4532                    mChannelHandle);
4533        if (rc < 0) {
4534            LOGE("start_channel failed");
4535            pthread_mutex_unlock(&mMutex);
4536            return rc;
4537        }
4538    }
4539
4540    pthread_mutex_unlock(&mMutex);
4541
4542    return 0;
4543}
4544
4545/*===========================================================================
4546 * FUNCTION   : flushPerf
4547 *
4548 * DESCRIPTION: This is the performance optimization version of flush that does
4549 *              not use stream off, rather flushes the system
4550 *
4551 * PARAMETERS :
4552 *
4553 *
4554 * RETURN     : 0 : success
4555 *              -EINVAL: input is malformed (device is not valid)
4556 *              -ENODEV: if the device has encountered a serious error
4557 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Mark a perf-flush in progress and snapshot how many buffers the HAL
    // currently owes the framework; the loop below waits until they return.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        // Backend flush failed: clear the in-progress flag and report a
        // serious device error.
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding; no need to wait.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Fall back to an untimed wait if the clock read fails; otherwise bound
    // the wait by FLUSH_TIMEOUT seconds (absolute deadline, as required by
    // pthread_cond_timedwait).
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // numPendingBufsAtFlush is decremented elsewhere as buffers come back;
    // mBuffersCond is signalled when that happens. Note pthread_cond_wait /
    // pthread_cond_timedwait return the error code directly (not via errno),
    // hence strerror(rc) below. A timeout surfaces here as ETIMEDOUT.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out before all buffers returned: report a
        // serious device error per the function contract.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    // Flush complete; clear the flag before releasing the lock.
    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4658
4659/*===========================================================================
4660 * FUNCTION   : handleCameraDeviceError
4661 *
4662 * DESCRIPTION: This function calls internal flush and notifies the error to
4663 *              framework and updates the state variable.
4664 *
4665 * PARAMETERS : None
4666 *
4667 * RETURN     : NO_ERROR on Success
4668 *              Error code on failure
4669 *==========================================================================*/
4670int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4671{
4672    int32_t rc = NO_ERROR;
4673
4674    pthread_mutex_lock(&mMutex);
4675    if (mState != ERROR) {
4676        //if mState != ERROR, nothing to be done
4677        pthread_mutex_unlock(&mMutex);
4678        return NO_ERROR;
4679    }
4680    pthread_mutex_unlock(&mMutex);
4681
4682    rc = flush(false /* restart channels */);
4683    if (NO_ERROR != rc) {
4684        LOGE("internal flush to handle mState = ERROR failed");
4685    }
4686
4687    pthread_mutex_lock(&mMutex);
4688    mState = DEINIT;
4689    pthread_mutex_unlock(&mMutex);
4690
4691    camera3_notify_msg_t notify_msg;
4692    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4693    notify_msg.type = CAMERA3_MSG_ERROR;
4694    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4695    notify_msg.message.error.error_stream = NULL;
4696    notify_msg.message.error.frame_number = 0;
4697    mCallbackOps->notify(mCallbackOps, &notify_msg);
4698
4699    return rc;
4700}
4701
4702/*===========================================================================
4703 * FUNCTION   : captureResultCb
4704 *
4705 * DESCRIPTION: Callback handler for all capture result
4706 *              (streams, as well as metadata)
4707 *
4708 * PARAMETERS :
4709 *   @metadata : metadata information
4710 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4711 *               NULL if metadata.
4712 *
4713 * RETURN     : NONE
4714 *==========================================================================*/
4715void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4716                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4717{
4718    if (metadata_buf) {
4719        if (mBatchSize) {
4720            handleBatchMetadata(metadata_buf,
4721                    true /* free_and_bufdone_meta_buf */);
4722        } else { /* mBatchSize = 0 */
4723            hdrPlusPerfLock(metadata_buf);
4724            pthread_mutex_lock(&mMutex);
4725            handleMetadataWithLock(metadata_buf,
4726                    true /* free_and_bufdone_meta_buf */,
4727                    false /* first frame of batch metadata */ );
4728            pthread_mutex_unlock(&mMutex);
4729        }
4730    } else if (isInputBuffer) {
4731        pthread_mutex_lock(&mMutex);
4732        handleInputBufferWithLock(frame_number);
4733        pthread_mutex_unlock(&mMutex);
4734    } else {
4735        pthread_mutex_lock(&mMutex);
4736        handleBufferWithLock(buffer, frame_number);
4737        pthread_mutex_unlock(&mMutex);
4738    }
4739    return;
4740}
4741
4742/*===========================================================================
4743 * FUNCTION   : getReprocessibleOutputStreamId
4744 *
4745 * DESCRIPTION: Get source output stream id for the input reprocess stream
4746 *              based on size and format, which would be the largest
4747 *              output stream if an input stream exists.
4748 *
4749 * PARAMETERS :
4750 *   @id      : return the stream id if found
4751 *
4752 * RETURN     : int32_t type of status
4753 *              NO_ERROR  -- success
4754 *              none-zero failure code
4755 *==========================================================================*/
4756int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4757{
4758    /* check if any output or bidirectional stream with the same size and format
4759       and return that stream */
4760    if ((mInputStreamInfo.dim.width > 0) &&
4761            (mInputStreamInfo.dim.height > 0)) {
4762        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4763                it != mStreamInfo.end(); it++) {
4764
4765            camera3_stream_t *stream = (*it)->stream;
4766            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4767                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4768                    (stream->format == mInputStreamInfo.format)) {
4769                // Usage flag for an input stream and the source output stream
4770                // may be different.
4771                LOGD("Found reprocessible output stream! %p", *it);
4772                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4773                         stream->usage, mInputStreamInfo.usage);
4774
4775                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4776                if (channel != NULL && channel->mStreams[0]) {
4777                    id = channel->mStreams[0]->getMyServerID();
4778                    return NO_ERROR;
4779                }
4780            }
4781        }
4782    } else {
4783        LOGD("No input stream, so no reprocessible output stream");
4784    }
4785    return NAME_NOT_FOUND;
4786}
4787
4788/*===========================================================================
4789 * FUNCTION   : lookupFwkName
4790 *
4791 * DESCRIPTION: In case the enum is not same in fwk and backend
4792 *              make sure the parameter is correctly propogated
4793 *
4794 * PARAMETERS  :
4795 *   @arr      : map between the two enums
4796 *   @len      : len of the map
4797 *   @hal_name : name of the hal_parm to map
4798 *
4799 * RETURN     : int type of status
4800 *              fwk_name  -- success
4801 *              none-zero failure code
4802 *==========================================================================*/
4803template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4804        size_t len, halType hal_name)
4805{
4806
4807    for (size_t i = 0; i < len; i++) {
4808        if (arr[i].hal_name == hal_name) {
4809            return arr[i].fwk_name;
4810        }
4811    }
4812
4813    /* Not able to find matching framework type is not necessarily
4814     * an error case. This happens when mm-camera supports more attributes
4815     * than the frameworks do */
4816    LOGH("Cannot find matching framework type");
4817    return NAME_NOT_FOUND;
4818}
4819
4820/*===========================================================================
4821 * FUNCTION   : lookupHalName
4822 *
4823 * DESCRIPTION: In case the enum is not same in fwk and backend
4824 *              make sure the parameter is correctly propogated
4825 *
4826 * PARAMETERS  :
4827 *   @arr      : map between the two enums
4828 *   @len      : len of the map
4829 *   @fwk_name : name of the hal_parm to map
4830 *
4831 * RETURN     : int32_t type of status
4832 *              hal_name  -- success
4833 *              none-zero failure code
4834 *==========================================================================*/
4835template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4836        size_t len, fwkType fwk_name)
4837{
4838    for (size_t i = 0; i < len; i++) {
4839        if (arr[i].fwk_name == fwk_name) {
4840            return arr[i].hal_name;
4841        }
4842    }
4843
4844    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4845    return NAME_NOT_FOUND;
4846}
4847
4848/*===========================================================================
4849 * FUNCTION   : lookupProp
4850 *
4851 * DESCRIPTION: lookup a value by its name
4852 *
4853 * PARAMETERS :
4854 *   @arr     : map between the two enums
4855 *   @len     : size of the map
4856 *   @name    : name to be looked up
4857 *
4858 * RETURN     : Value if found
4859 *              CAM_CDS_MODE_MAX if not found
4860 *==========================================================================*/
4861template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4862        size_t len, const char *name)
4863{
4864    if (name) {
4865        for (size_t i = 0; i < len; i++) {
4866            if (!strcmp(arr[i].desc, name)) {
4867                return arr[i].val;
4868            }
4869        }
4870    }
4871    return CAM_CDS_MODE_MAX;
4872}
4873
4874/*===========================================================================
4875 *
4876 * DESCRIPTION:
4877 *
4878 * PARAMETERS :
4879 *   @metadata : metadata information from callback
4880 *   @timestamp: metadata buffer timestamp
4881 *   @request_id: request id
4882 *   @jpegMetadata: additional jpeg metadata
4883 *   @hybrid_ae_enable: whether hybrid ae is enabled
4884 *   // DevCamDebug metadata
4885 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
4886 *   // DevCamDebug metadata end
4887 *   @pprocDone: whether internal offline postprocsesing is done
4888 *
4889 * RETURN     : camera_metadata_t*
4890 *              metadata in a format specified by fwk
4891 *==========================================================================*/
4892camera_metadata_t*
4893QCamera3HardwareInterface::translateFromHalMetadata(
4894                                 metadata_buffer_t *metadata,
4895                                 nsecs_t timestamp,
4896                                 int32_t request_id,
4897                                 const CameraMetadata& jpegMetadata,
4898                                 uint8_t pipeline_depth,
4899                                 uint8_t capture_intent,
4900                                 uint8_t hybrid_ae_enable,
4901                                 /* DevCamDebug metadata translateFromHalMetadata argument */
4902                                 uint8_t DevCamDebug_meta_enable,
4903                                 /* DevCamDebug metadata end */
4904                                 bool pprocDone,
4905                                 uint8_t fwk_cacMode,
4906                                 bool firstMetadataInBatch)
4907{
4908    CameraMetadata camMetadata;
4909    camera_metadata_t *resultMetadata;
4910
4911    if (mBatchSize && !firstMetadataInBatch) {
4912        /* In batch mode, use cached metadata from the first metadata
4913            in the batch */
4914        camMetadata.clear();
4915        camMetadata = mCachedMetadata;
4916    }
4917
4918    if (jpegMetadata.entryCount())
4919        camMetadata.append(jpegMetadata);
4920
4921    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4922    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4923    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4924    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4925    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4926    if (mBatchSize == 0) {
4927        // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
4928        camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
4929    }
4930
4931    if (mBatchSize && !firstMetadataInBatch) {
4932        /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4933        resultMetadata = camMetadata.release();
4934        return resultMetadata;
4935    }
4936
4937    // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
4938    // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
4939    if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
4940        // DevCamDebug metadata translateFromHalMetadata AF
4941        IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
4942                CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
4943            int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
4944            camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
4945        }
4946        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
4947                CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
4948            int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
4949            camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
4950        }
4951        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
4952                CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
4953            int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
4954            camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
4955        }
4956        IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
4957                CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
4958            int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
4959            camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
4960        }
4961        IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
4962                CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
4963            int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
4964            camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
4965        }
4966        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
4967                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
4968            int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
4969                *DevCamDebug_af_monitor_pdaf_target_pos;
4970            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
4971                &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
4972        }
4973        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
4974                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
4975            int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
4976                *DevCamDebug_af_monitor_pdaf_confidence;
4977            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
4978                &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
4979        }
4980        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
4981                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
4982            int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
4983            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
4984                &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
4985        }
4986        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
4987                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
4988            int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
4989                *DevCamDebug_af_monitor_tof_target_pos;
4990            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
4991                &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
4992        }
4993        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
4994                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
4995            int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
4996                *DevCamDebug_af_monitor_tof_confidence;
4997            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
4998                &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
4999        }
5000        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5001                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5002            int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5003            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5004                &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5005        }
5006        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5007                CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5008            int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5009            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5010                &fwk_DevCamDebug_af_monitor_type_select, 1);
5011        }
        // DevCamDebug AF section (continued): forward each vendor AF-debug
        // value from the HAL metadata batch into the framework result when
        // the HAL reported it. Each value is copied into a fwk_* local so a
        // pointer to it can be handed to camMetadata.update().
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
                CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
            int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
                &fwk_DevCamDebug_af_monitor_refocus, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
                CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
                &fwk_DevCamDebug_af_monitor_target_pos, 1);
        }
        // PDAF-search debug values.
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
                *DevCamDebug_af_search_pdaf_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
                &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
                &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
                &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
                &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
                &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
        }
        // TOF-search debug values.
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_target_pos =
                *DevCamDebug_af_search_tof_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
                &fwk_DevCamDebug_af_search_tof_target_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
                &fwk_DevCamDebug_af_search_tof_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
                &fwk_DevCamDebug_af_search_tof_near_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
                &fwk_DevCamDebug_af_search_tof_far_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
                &fwk_DevCamDebug_af_search_tof_confidence, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
            int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
                &fwk_DevCamDebug_af_search_type_select, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
                &fwk_DevCamDebug_af_search_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
                &fwk_DevCamDebug_af_search_target_pos, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AEC
        // (exposure-control debug values; real_gain and lux_idx are floats,
        // the rest are int32.)
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
                CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
    }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
                CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
                CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
                CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
                CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
            int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
            camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
                CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
            float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
            camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
                CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
            int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
            camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
                CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
            float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
            camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AWB
        // (white-balance debug values: per-channel gains, CCT, decision.)
        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
        }
5171    }
5172    // atrace_end(ATRACE_TAG_ALWAYS);
5173
    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
        // Widened to int64_t because the data passed to
        // ANDROID_SYNC_FRAME_NUMBER is int64.
        int64_t fwk_frame_number = *frame_number;
        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
    }

    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
        // HAL reports float fps; the framework tag takes a [min, max] int32 pair.
        int32_t fps_range[2];
        fps_range[0] = (int32_t)float_range->min_fps;
        fps_range[1] = (int32_t)float_range->max_fps;
        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                      fps_range, 2);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
             fps_range[0], fps_range[1]);
    }

    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        // NOTE(review): the (uint8_t) cast truncates lookupFwkName()'s result
        // BEFORE the NAME_NOT_FOUND comparison; if NAME_NOT_FOUND is -1 it
        // becomes 255 here and the guard below can never reject an unmapped
        // scene mode. The other lookups in this function cast only after the
        // check — confirm whether this cast is intentional.
        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP),
                *sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkSceneMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
                     fwkSceneMode);
        }
    }

    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
    }
5214
    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
    }

    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
            CAM_INTF_META_EDGE_MODE, metadata) {
        // Only the edge_mode field is forwarded; other fields of
        // cam_edge_application_t are not reported to the framework here.
        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
    }

    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
        uint8_t fwk_flashPower = (uint8_t) *flashPower;
        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
    }

    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
    }

    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        // Negative states from the HAL are dropped entirely.
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            // Devices without a flash unit always report UNAVAILABLE,
            // regardless of what the HAL sent.
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_flashMode = (uint8_t)val;
            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
    }

    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
    }

    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // else branch of the IF_META_AVAILABLE macro: taken when the HAL
        // metadata carries no video-stabilization entry.
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("%s: EIS result default to OFF mode", __func__);
    }
5285
    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }

    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        // Map the HAL's RGGB-ordered black levels into the order required by
        // this sensor's color-filter arrangement before publishing.
        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

        // Update ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL: convert the values from
        // the internal 12-bit depth to the sensor's 10-bit raw depth space
        // (divide by 4).
        fwk_blackLevelInd[0] /= 4.0;
        fwk_blackLevelInd[1] /= 4.0;
        fwk_blackLevelInd[2] /= 4.0;
        fwk_blackLevelInd[3] /= 4.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
    }

    // Fixed whitelevel is used by ISP/Sensor
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);
5324
    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        // Crop region is published as [left, top, width, height].
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3]);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }

    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    // Note: "sensorFameDuration" is a long-standing local-name typo for
    // "frame duration"; kept as-is.
    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        // Variable-length array (GCC extension): one (S, O) pair per color
        // channel, the same pair replicated for every channel.
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }

    // Post-RAW sensitivity boost: start from the ISP sensitivity (default
    // 100 when the HAL did not report one), then scale by the post-stats
    // sensitivity factor if present. Always published.
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5385
    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }

    // Face detection results: scores and rectangles are published whenever
    // the mapped mode is not OFF; landmarks and IDs are added only in FULL
    // mode.
    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                *faceDetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_faceDetectMode = (uint8_t)val;
            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);

            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
                        CAM_INTF_META_FACE_DETECTION, metadata) {
                    // Clamp to MAX_ROI faces; the fixed-size scratch arrays
                    // below are sized accordingly.
                    uint8_t numFaces = MIN(
                            faceDetectionInfo->num_faces_detected, MAX_ROI);
                    int32_t faceIds[MAX_ROI];
                    uint8_t faceScores[MAX_ROI];
                    int32_t faceRectangles[MAX_ROI * 4];
                    int32_t faceLandmarks[MAX_ROI * 6];
                    // j indexes faceRectangles (4 ints/face); k indexes
                    // faceLandmarks (6 ints/face).
                    size_t j = 0, k = 0;

                    for (size_t i = 0; i < numFaces; i++) {
                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                        // Adjust crop region from sensor output coordinate system to active
                        // array coordinate system.
                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                rect.width, rect.height);

                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                                faceRectangles+j, -1);

                        j+= 4;
                    }
                    // Zero-fill everything when no faces were detected so the
                    // updates below publish well-defined (empty) entries.
                    if (numFaces <= 0) {
                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
                    }

                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
                            numFaces);
                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
                            faceRectangles, numFaces * 4U);
                    if (fwk_faceDetectMode ==
                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
                                CAM_INTF_META_FACE_LANDMARK, metadata) {

                            for (size_t i = 0; i < numFaces; i++) {
                                // Map the co-ordinate sensor output coordinate system to active
                                // array coordinate system.
                                mCropRegionMapper.toActiveArray(
                                        landmarks->face_landmarks[i].left_eye_center.x,
                                        landmarks->face_landmarks[i].left_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        landmarks->face_landmarks[i].right_eye_center.x,
                                        landmarks->face_landmarks[i].right_eye_center.y);
                                mCropRegionMapper.toActiveArray(
                                        landmarks->face_landmarks[i].mouth_center.x,
                                        landmarks->face_landmarks[i].mouth_center.y);

                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
                                k+= 6;
                            }
                        }

                        // NOTE(review): faceIds is only zero-filled in the
                        // numFaces <= 0 branch above and is never written for
                        // detected faces, so this publishes uninitialized
                        // stack data when numFaces > 0 — confirm whether the
                        // per-face ID should be copied from faceDetectionInfo.
                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
                                faceLandmarks, numFaces * 6U);
                   }
                }
            }
        }
    }
5464
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
    }

    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        // Always publishes the full max-sized map (3 values per cell).
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        // Clamp the advertised map size to the HAL's maximum buffer
        // dimensions; 4 gain samples per map cell (hence the 4U multiplier).
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }

    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }

    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        // Clamp the point count in place (this mutates the HAL metadata
        // entry) so the update() calls below never read past the curve
        // buffers.
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        // Each tonemap point is an (in, out) pair, hence count * 2 floats.
        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAINS_COUNT);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        // The HAL matrix is reinterpreted as an array of rationals; the
        // (void *) intermediate silences strict-aliasing cast warnings.
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        // Same in-place count clamp as for the RGB tonemap curves above.
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }
5558
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        // OTP-provided Gr/Gb white-balance ratio, reported as the green split.
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }

    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
        if (NAME_NOT_FOUND != fwk_testPatternMode) {
            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
        }
        // Index 0 is always R and index 3 always B; indices 1/2 carry the
        // Gr/Gb values, swapped according to the sensor's CFA arrangement.
        int32_t fwk_testPatternData[4];
        fwk_testPatternData[0] = testPatternData->r;
        fwk_testPatternData[3] = testPatternData->b;
        switch (gCamCapability[mCameraId]->color_arrangement) {
        case CAM_FILTER_ARRANGEMENT_RGGB:
        case CAM_FILTER_ARRANGEMENT_GRBG:
            fwk_testPatternData[1] = testPatternData->gr;
            fwk_testPatternData[2] = testPatternData->gb;
            break;
        case CAM_FILTER_ARRANGEMENT_GBRG:
        case CAM_FILTER_ARRANGEMENT_BGGR:
            fwk_testPatternData[2] = testPatternData->gr;
            fwk_testPatternData[1] = testPatternData->gb;
            break;
        default:
            // Unknown CFA: indices 1 and 2 are left unset here, so the
            // update() below publishes whatever happens to be on the stack
            // for them.
            LOGE("color arrangement %d is not supported",
                gCamCapability[mCameraId]->color_arrangement);
            break;
        }
        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
    }
5610
    // JPEG EXIF-related results translated from the HAL metadata.
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
        // Three doubles forwarded verbatim (the framework tag expects
        // [latitude, longitude, altitude]).
        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
        // NOTE(review): the String8 constructor assumes the byte array is
        // NUL-terminated — confirm the HAL always terminates this field.
        String8 str((const char *)gps_methods);
        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
        // Opaque vendor payload passed through for reprocess requests; the
        // count is the fixed payload size expressed in int32 words.
        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
                privateData,
                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
    }
5650
5651    if (metadata->is_tuning_params_valid) {
5652        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5653        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5654        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5655
5656
5657        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5658                sizeof(uint32_t));
5659        data += sizeof(uint32_t);
5660
5661        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5662                sizeof(uint32_t));
5663        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5664        data += sizeof(uint32_t);
5665
5666        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5667                sizeof(uint32_t));
5668        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5669        data += sizeof(uint32_t);
5670
5671        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5672                sizeof(uint32_t));
5673        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5674        data += sizeof(uint32_t);
5675
5676        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5677                sizeof(uint32_t));
5678        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5679        data += sizeof(uint32_t);
5680
5681        metadata->tuning_params.tuning_mod3_data_size = 0;
5682        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5683                sizeof(uint32_t));
5684        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5685        data += sizeof(uint32_t);
5686
5687        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5688                TUNING_SENSOR_DATA_MAX);
5689        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5690                count);
5691        data += count;
5692
5693        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5694                TUNING_VFE_DATA_MAX);
5695        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5696                count);
5697        data += count;
5698
5699        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5700                TUNING_CPP_DATA_MAX);
5701        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5702                count);
5703        data += count;
5704
5705        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5706                TUNING_CAC_DATA_MAX);
5707        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5708                count);
5709        data += count;
5710
5711        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5712                (int32_t *)(void *)tuning_meta_data_blob,
5713                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5714    }
5715
5716    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5717            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5718        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5719                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5720                NEUTRAL_COL_POINTS);
5721    }
5722
5723    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5724        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5725        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5726    }
5727
5728    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5729        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5730        // Adjust crop region from sensor output coordinate system to active
5731        // array coordinate system.
5732        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5733                hAeRegions->rect.width, hAeRegions->rect.height);
5734
5735        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5736        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5737                REGIONS_TUPLE_COUNT);
5738        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5739                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5740                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5741                hAeRegions->rect.height);
5742    }
5743
5744    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5745        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5746        if (NAME_NOT_FOUND != val) {
5747            uint8_t fwkAfMode = (uint8_t)val;
5748            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5749            LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5750        } else {
5751            LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5752                    val);
5753        }
5754    }
5755
5756    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5757        uint8_t fwk_afState = (uint8_t) *afState;
5758        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5759        LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5760    }
5761
5762    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5763        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5764    }
5765
5766    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5767        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5768    }
5769
5770    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5771        uint8_t fwk_lensState = *lensState;
5772        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5773    }
5774
5775    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5776        /*af regions*/
5777        int32_t afRegions[REGIONS_TUPLE_COUNT];
5778        // Adjust crop region from sensor output coordinate system to active
5779        // array coordinate system.
5780        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5781                hAfRegions->rect.width, hAfRegions->rect.height);
5782
5783        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5784        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5785                REGIONS_TUPLE_COUNT);
5786        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5787                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5788                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5789                hAfRegions->rect.height);
5790    }
5791
5792    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5793        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5794                *hal_ab_mode);
5795        if (NAME_NOT_FOUND != val) {
5796            uint8_t fwk_ab_mode = (uint8_t)val;
5797            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5798        }
5799    }
5800
5801    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5802        int val = lookupFwkName(SCENE_MODES_MAP,
5803                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5804        if (NAME_NOT_FOUND != val) {
5805            uint8_t fwkBestshotMode = (uint8_t)val;
5806            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5807            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5808        } else {
5809            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5810        }
5811    }
5812
5813    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5814         uint8_t fwk_mode = (uint8_t) *mode;
5815         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5816    }
5817
5818    /* Constant metadata values to be update*/
5819    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5820    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5821
5822    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5823    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5824
5825    int32_t hotPixelMap[2];
5826    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5827
5828    // CDS
5829    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5830        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5831    }
5832
5833    // TNR
5834    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5835        uint8_t tnr_enable       = tnr->denoise_enable;
5836        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5837
5838        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5839        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5840    }
5841
5842    // Reprocess crop data
5843    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5844        uint8_t cnt = crop_data->num_of_streams;
5845        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5846            // mm-qcamera-daemon only posts crop_data for streams
5847            // not linked to pproc. So no valid crop metadata is not
5848            // necessarily an error case.
5849            LOGD("No valid crop metadata entries");
5850        } else {
5851            uint32_t reproc_stream_id;
5852            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5853                LOGD("No reprocessible stream found, ignore crop data");
5854            } else {
5855                int rc = NO_ERROR;
5856                Vector<int32_t> roi_map;
5857                int32_t *crop = new int32_t[cnt*4];
5858                if (NULL == crop) {
5859                   rc = NO_MEMORY;
5860                }
5861                if (NO_ERROR == rc) {
5862                    int32_t streams_found = 0;
5863                    for (size_t i = 0; i < cnt; i++) {
5864                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5865                            if (pprocDone) {
5866                                // HAL already does internal reprocessing,
5867                                // either via reprocessing before JPEG encoding,
5868                                // or offline postprocessing for pproc bypass case.
5869                                crop[0] = 0;
5870                                crop[1] = 0;
5871                                crop[2] = mInputStreamInfo.dim.width;
5872                                crop[3] = mInputStreamInfo.dim.height;
5873                            } else {
5874                                crop[0] = crop_data->crop_info[i].crop.left;
5875                                crop[1] = crop_data->crop_info[i].crop.top;
5876                                crop[2] = crop_data->crop_info[i].crop.width;
5877                                crop[3] = crop_data->crop_info[i].crop.height;
5878                            }
5879                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5880                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5881                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5882                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5883                            streams_found++;
5884                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5885                                    crop[0], crop[1], crop[2], crop[3]);
5886                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5887                                    crop_data->crop_info[i].roi_map.left,
5888                                    crop_data->crop_info[i].roi_map.top,
5889                                    crop_data->crop_info[i].roi_map.width,
5890                                    crop_data->crop_info[i].roi_map.height);
5891                            break;
5892
5893                       }
5894                    }
5895                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5896                            &streams_found, 1);
5897                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5898                            crop, (size_t)(streams_found * 4));
5899                    if (roi_map.array()) {
5900                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5901                                roi_map.array(), roi_map.size());
5902                    }
5903               }
5904               if (crop) {
5905                   delete [] crop;
5906               }
5907            }
5908        }
5909    }
5910
5911    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5912        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5913        // so hardcoding the CAC result to OFF mode.
5914        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5915        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5916    } else {
5917        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5918            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5919                    *cacMode);
5920            if (NAME_NOT_FOUND != val) {
5921                uint8_t resultCacMode = (uint8_t)val;
5922                // check whether CAC result from CB is equal to Framework set CAC mode
5923                // If not equal then set the CAC mode came in corresponding request
5924                if (fwk_cacMode != resultCacMode) {
5925                    resultCacMode = fwk_cacMode;
5926                }
5927                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5928                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5929            } else {
5930                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5931            }
5932        }
5933    }
5934
5935    // Post blob of cam_cds_data through vendor tag.
5936    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5937        uint8_t cnt = cdsInfo->num_of_streams;
5938        cam_cds_data_t cdsDataOverride;
5939        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5940        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5941        cdsDataOverride.num_of_streams = 1;
5942        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5943            uint32_t reproc_stream_id;
5944            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5945                LOGD("No reprocessible stream found, ignore cds data");
5946            } else {
5947                for (size_t i = 0; i < cnt; i++) {
5948                    if (cdsInfo->cds_info[i].stream_id ==
5949                            reproc_stream_id) {
5950                        cdsDataOverride.cds_info[0].cds_enable =
5951                                cdsInfo->cds_info[i].cds_enable;
5952                        break;
5953                    }
5954                }
5955            }
5956        } else {
5957            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5958        }
5959        camMetadata.update(QCAMERA3_CDS_INFO,
5960                (uint8_t *)&cdsDataOverride,
5961                sizeof(cam_cds_data_t));
5962    }
5963
5964    // Ldaf calibration data
5965    if (!mLdafCalibExist) {
5966        IF_META_AVAILABLE(uint32_t, ldafCalib,
5967                CAM_INTF_META_LDAF_EXIF, metadata) {
5968            mLdafCalibExist = true;
5969            mLdafCalib[0] = ldafCalib[0];
5970            mLdafCalib[1] = ldafCalib[1];
5971            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5972                    ldafCalib[0], ldafCalib[1]);
5973        }
5974    }
5975
5976    // AF scene change
5977    IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
5978        camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
5979    }
5980
5981    /* In batch mode, cache the first metadata in the batch */
5982    if (mBatchSize && firstMetadataInBatch) {
5983        mCachedMetadata.clear();
5984        mCachedMetadata = camMetadata;
5985    }
5986
5987    resultMetadata = camMetadata.release();
5988    return resultMetadata;
5989}
5990
5991/*===========================================================================
5992 * FUNCTION   : saveExifParams
5993 *
5994 * DESCRIPTION:
5995 *
5996 * PARAMETERS :
5997 *   @metadata : metadata information from callback
5998 *
5999 * RETURN     : none
6000 *
6001 *==========================================================================*/
6002void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6003{
6004    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6005            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6006        if (mExifParams.debug_params) {
6007            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6008            mExifParams.debug_params->ae_debug_params_valid = TRUE;
6009        }
6010    }
6011    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6012            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6013        if (mExifParams.debug_params) {
6014            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6015            mExifParams.debug_params->awb_debug_params_valid = TRUE;
6016        }
6017    }
6018    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6019            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6020        if (mExifParams.debug_params) {
6021            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6022            mExifParams.debug_params->af_debug_params_valid = TRUE;
6023        }
6024    }
6025    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6026            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6027        if (mExifParams.debug_params) {
6028            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6029            mExifParams.debug_params->asd_debug_params_valid = TRUE;
6030        }
6031    }
6032    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6033            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6034        if (mExifParams.debug_params) {
6035            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6036            mExifParams.debug_params->stats_debug_params_valid = TRUE;
6037        }
6038    }
6039    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6040            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6041        if (mExifParams.debug_params) {
6042            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6043            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6044        }
6045    }
6046    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6047            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6048        if (mExifParams.debug_params) {
6049            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6050            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6051        }
6052    }
6053    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6054            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6055        if (mExifParams.debug_params) {
6056            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6057            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6058        }
6059    }
6060}
6061
6062/*===========================================================================
6063 * FUNCTION   : get3AExifParams
6064 *
6065 * DESCRIPTION:
6066 *
6067 * PARAMETERS : none
6068 *
6069 *
6070 * RETURN     : mm_jpeg_exif_params_t
6071 *
6072 *==========================================================================*/
6073mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
6074{
6075    return mExifParams;
6076}
6077
6078/*===========================================================================
6079 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
6080 *
6081 * DESCRIPTION:
6082 *
6083 * PARAMETERS :
6084 *   @metadata : metadata information from callback
6085 *
6086 * RETURN     : camera_metadata_t*
6087 *              metadata in a format specified by fwk
6088 *==========================================================================*/
6089camera_metadata_t*
6090QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
6091                                (metadata_buffer_t *metadata)
6092{
6093    CameraMetadata camMetadata;
6094    camera_metadata_t *resultMetadata;
6095
6096
6097    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
6098        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
6099        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
6100        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
6101    }
6102
6103    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
6104        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
6105                &aecTrigger->trigger, 1);
6106        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
6107                &aecTrigger->trigger_id, 1);
6108        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
6109                 aecTrigger->trigger);
6110        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
6111                aecTrigger->trigger_id);
6112    }
6113
6114    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
6115        uint8_t fwk_ae_state = (uint8_t) *ae_state;
6116        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
6117        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
6118    }
6119
6120    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
6121        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
6122                &af_trigger->trigger, 1);
6123        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
6124                 af_trigger->trigger);
6125        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
6126        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
6127                af_trigger->trigger_id);
6128    }
6129
6130    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
6131        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6132                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
6133        if (NAME_NOT_FOUND != val) {
6134            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
6135            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
6136            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
6137        } else {
6138            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
6139        }
6140    }
6141
6142    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
6143    uint32_t aeMode = CAM_AE_MODE_MAX;
6144    int32_t flashMode = CAM_FLASH_MODE_MAX;
6145    int32_t redeye = -1;
6146    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
6147        aeMode = *pAeMode;
6148    }
6149    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
6150        flashMode = *pFlashMode;
6151    }
6152    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
6153        redeye = *pRedeye;
6154    }
6155
6156    if (1 == redeye) {
6157        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
6158        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6159    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
6160        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
6161                flashMode);
6162        if (NAME_NOT_FOUND != val) {
6163            fwk_aeMode = (uint8_t)val;
6164            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6165        } else {
6166            LOGE("Unsupported flash mode %d", flashMode);
6167        }
6168    } else if (aeMode == CAM_AE_MODE_ON) {
6169        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
6170        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6171    } else if (aeMode == CAM_AE_MODE_OFF) {
6172        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
6173        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
6174    } else {
6175        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
6176              "flashMode:%d, aeMode:%u!!!",
6177                 redeye, flashMode, aeMode);
6178    }
6179
6180    resultMetadata = camMetadata.release();
6181    return resultMetadata;
6182}
6183
6184/*===========================================================================
6185 * FUNCTION   : dumpMetadataToFile
6186 *
6187 * DESCRIPTION: Dumps tuning metadata to file system
6188 *
6189 * PARAMETERS :
6190 *   @meta           : tuning metadata
6191 *   @dumpFrameCount : current dump frame count
6192 *   @enabled        : Enable mask
6193 *
6194 *==========================================================================*/
6195void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6196                                                   uint32_t &dumpFrameCount,
6197                                                   bool enabled,
6198                                                   const char *type,
6199                                                   uint32_t frameNumber)
6200{
6201    //Some sanity checks
6202    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6203        LOGE("Tuning sensor data size bigger than expected %d: %d",
6204              meta.tuning_sensor_data_size,
6205              TUNING_SENSOR_DATA_MAX);
6206        return;
6207    }
6208
6209    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6210        LOGE("Tuning VFE data size bigger than expected %d: %d",
6211              meta.tuning_vfe_data_size,
6212              TUNING_VFE_DATA_MAX);
6213        return;
6214    }
6215
6216    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6217        LOGE("Tuning CPP data size bigger than expected %d: %d",
6218              meta.tuning_cpp_data_size,
6219              TUNING_CPP_DATA_MAX);
6220        return;
6221    }
6222
6223    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6224        LOGE("Tuning CAC data size bigger than expected %d: %d",
6225              meta.tuning_cac_data_size,
6226              TUNING_CAC_DATA_MAX);
6227        return;
6228    }
6229    //
6230
6231    if(enabled){
6232        char timeBuf[FILENAME_MAX];
6233        char buf[FILENAME_MAX];
6234        memset(buf, 0, sizeof(buf));
6235        memset(timeBuf, 0, sizeof(timeBuf));
6236        time_t current_time;
6237        struct tm * timeinfo;
6238        time (&current_time);
6239        timeinfo = localtime (&current_time);
6240        if (timeinfo != NULL) {
6241            /* Consistent naming for Jpeg+meta+raw: meta name */
6242            strftime (timeBuf, sizeof(timeBuf),
6243                    QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6244            /* Consistent naming for Jpeg+meta+raw: meta name end*/
6245        }
6246        String8 filePath(timeBuf);
6247         /* Consistent naming for Jpeg+meta+raw */
6248        snprintf(buf,
6249                sizeof(buf),
6250                "%dm_%s_%d.bin",
6251                dumpFrameCount,
6252                type,
6253                frameNumber);
6254         /* Consistent naming for Jpeg+meta+raw end */
6255        filePath.append(buf);
6256        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6257        if (file_fd >= 0) {
6258            ssize_t written_len = 0;
6259            meta.tuning_data_version = TUNING_DATA_VERSION;
6260            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6261            written_len += write(file_fd, data, sizeof(uint32_t));
6262            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6263            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6264            written_len += write(file_fd, data, sizeof(uint32_t));
6265            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6266            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6267            written_len += write(file_fd, data, sizeof(uint32_t));
6268            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6269            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6270            written_len += write(file_fd, data, sizeof(uint32_t));
6271            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6272            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6273            written_len += write(file_fd, data, sizeof(uint32_t));
6274            meta.tuning_mod3_data_size = 0;
6275            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6276            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6277            written_len += write(file_fd, data, sizeof(uint32_t));
6278            size_t total_size = meta.tuning_sensor_data_size;
6279            data = (void *)((uint8_t *)&meta.data);
6280            written_len += write(file_fd, data, total_size);
6281            total_size = meta.tuning_vfe_data_size;
6282            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6283            written_len += write(file_fd, data, total_size);
6284            total_size = meta.tuning_cpp_data_size;
6285            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6286            written_len += write(file_fd, data, total_size);
6287            total_size = meta.tuning_cac_data_size;
6288            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6289            written_len += write(file_fd, data, total_size);
6290            close(file_fd);
6291        }else {
6292            LOGE("fail to open file for metadata dumping");
6293        }
6294    }
6295}
6296
6297/*===========================================================================
6298 * FUNCTION   : cleanAndSortStreamInfo
6299 *
6300 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6301 *              and sort them such that raw stream is at the end of the list
6302 *              This is a workaround for camera daemon constraint.
6303 *
6304 * PARAMETERS : None
6305 *
6306 *==========================================================================*/
6307void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6308{
6309    List<stream_info_t *> newStreamInfo;
6310
6311    /*clean up invalid streams*/
6312    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6313            it != mStreamInfo.end();) {
6314        if(((*it)->status) == INVALID){
6315            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6316            delete channel;
6317            free(*it);
6318            it = mStreamInfo.erase(it);
6319        } else {
6320            it++;
6321        }
6322    }
6323
6324    // Move preview/video/callback/snapshot streams into newList
6325    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6326            it != mStreamInfo.end();) {
6327        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6328                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6329                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6330            newStreamInfo.push_back(*it);
6331            it = mStreamInfo.erase(it);
6332        } else
6333            it++;
6334    }
6335    // Move raw streams into newList
6336    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6337            it != mStreamInfo.end();) {
6338        newStreamInfo.push_back(*it);
6339        it = mStreamInfo.erase(it);
6340    }
6341
6342    mStreamInfo = newStreamInfo;
6343}
6344
6345/*===========================================================================
6346 * FUNCTION   : extractJpegMetadata
6347 *
6348 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6349 *              JPEG metadata is cached in HAL, and return as part of capture
6350 *              result when metadata is returned from camera daemon.
6351 *
6352 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6353 *              @request:      capture request
6354 *
6355 *==========================================================================*/
6356void QCamera3HardwareInterface::extractJpegMetadata(
6357        CameraMetadata& jpegMetadata,
6358        const camera3_capture_request_t *request)
6359{
6360    CameraMetadata frame_settings;
6361    frame_settings = request->settings;
6362
6363    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6364        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6365                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6366                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6367
6368    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6369        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6370                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6371                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6372
6373    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6374        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6375                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6376                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6377
6378    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6379        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6380                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6381                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6382
6383    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6384        jpegMetadata.update(ANDROID_JPEG_QUALITY,
6385                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6386                frame_settings.find(ANDROID_JPEG_QUALITY).count);
6387
6388    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6389        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6390                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6391                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6392
6393    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6394        int32_t thumbnail_size[2];
6395        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6396        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6397        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6398            int32_t orientation =
6399                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6400            if ((orientation == 90) || (orientation == 270)) {
6401               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6402               int32_t temp;
6403               temp = thumbnail_size[0];
6404               thumbnail_size[0] = thumbnail_size[1];
6405               thumbnail_size[1] = temp;
6406            }
6407         }
6408         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6409                thumbnail_size,
6410                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6411    }
6412
6413}
6414
6415/*===========================================================================
6416 * FUNCTION   : convertToRegions
6417 *
6418 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6419 *
6420 * PARAMETERS :
6421 *   @rect   : cam_rect_t struct to convert
6422 *   @region : int32_t destination array
6423 *   @weight : if we are converting from cam_area_t, weight is valid
6424 *             else weight = -1
6425 *
6426 *==========================================================================*/
6427void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6428        int32_t *region, int weight)
6429{
6430    region[0] = rect.left;
6431    region[1] = rect.top;
6432    region[2] = rect.left + rect.width;
6433    region[3] = rect.top + rect.height;
6434    if (weight > -1) {
6435        region[4] = weight;
6436    }
6437}
6438
6439/*===========================================================================
6440 * FUNCTION   : convertFromRegions
6441 *
6442 * DESCRIPTION: helper method to convert from array to cam_rect_t
6443 *
6444 * PARAMETERS :
6445 *   @rect   : cam_rect_t struct to convert
6446 *   @region : int32_t destination array
6447 *   @weight : if we are converting from cam_area_t, weight is valid
6448 *             else weight = -1
6449 *
6450 *==========================================================================*/
6451void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6452        const camera_metadata_t *settings, uint32_t tag)
6453{
6454    CameraMetadata frame_settings;
6455    frame_settings = settings;
6456    int32_t x_min = frame_settings.find(tag).data.i32[0];
6457    int32_t y_min = frame_settings.find(tag).data.i32[1];
6458    int32_t x_max = frame_settings.find(tag).data.i32[2];
6459    int32_t y_max = frame_settings.find(tag).data.i32[3];
6460    roi.weight = frame_settings.find(tag).data.i32[4];
6461    roi.rect.left = x_min;
6462    roi.rect.top = y_min;
6463    roi.rect.width = x_max - x_min;
6464    roi.rect.height = y_max - y_min;
6465}
6466
6467/*===========================================================================
6468 * FUNCTION   : resetIfNeededROI
6469 *
6470 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6471 *              crop region
6472 *
6473 * PARAMETERS :
6474 *   @roi       : cam_area_t struct to resize
6475 *   @scalerCropRegion : cam_crop_region_t region to compare against
6476 *
6477 *
6478 *==========================================================================*/
6479bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6480                                                 const cam_crop_region_t* scalerCropRegion)
6481{
6482    int32_t roi_x_max = roi->rect.width + roi->rect.left;
6483    int32_t roi_y_max = roi->rect.height + roi->rect.top;
6484    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6485    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6486
6487    /* According to spec weight = 0 is used to indicate roi needs to be disabled
6488     * without having this check the calculations below to validate if the roi
6489     * is inside scalar crop region will fail resulting in the roi not being
6490     * reset causing algorithm to continue to use stale roi window
6491     */
6492    if (roi->weight == 0) {
6493        return true;
6494    }
6495
6496    if ((roi_x_max < scalerCropRegion->left) ||
6497        // right edge of roi window is left of scalar crop's left edge
6498        (roi_y_max < scalerCropRegion->top)  ||
6499        // bottom edge of roi window is above scalar crop's top edge
6500        (roi->rect.left > crop_x_max) ||
6501        // left edge of roi window is beyond(right) of scalar crop's right edge
6502        (roi->rect.top > crop_y_max)){
6503        // top edge of roi windo is above scalar crop's top edge
6504        return false;
6505    }
6506    if (roi->rect.left < scalerCropRegion->left) {
6507        roi->rect.left = scalerCropRegion->left;
6508    }
6509    if (roi->rect.top < scalerCropRegion->top) {
6510        roi->rect.top = scalerCropRegion->top;
6511    }
6512    if (roi_x_max > crop_x_max) {
6513        roi_x_max = crop_x_max;
6514    }
6515    if (roi_y_max > crop_y_max) {
6516        roi_y_max = crop_y_max;
6517    }
6518    roi->rect.width = roi_x_max - roi->rect.left;
6519    roi->rect.height = roi_y_max - roi->rect.top;
6520    return true;
6521}
6522
6523/*===========================================================================
6524 * FUNCTION   : convertLandmarks
6525 *
6526 * DESCRIPTION: helper method to extract the landmarks from face detection info
6527 *
6528 * PARAMETERS :
6529 *   @landmark_data : input landmark data to be converted
6530 *   @landmarks : int32_t destination array
6531 *
6532 *
6533 *==========================================================================*/
6534void QCamera3HardwareInterface::convertLandmarks(
6535        cam_face_landmarks_info_t landmark_data,
6536        int32_t *landmarks)
6537{
6538    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6539    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6540    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6541    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6542    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6543    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6544}
6545
6546#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6547/*===========================================================================
6548 * FUNCTION   : initCapabilities
6549 *
6550 * DESCRIPTION: initialize camera capabilities in static data struct
6551 *
6552 * PARAMETERS :
6553 *   @cameraId  : camera Id
6554 *
6555 * RETURN     : int32_t type of status
6556 *              NO_ERROR  -- success
6557 *              none-zero failure code
6558 *==========================================================================*/
6559int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6560{
6561    int rc = 0;
6562    mm_camera_vtbl_t *cameraHandle = NULL;
6563    QCamera3HeapMemory *capabilityHeap = NULL;
6564
6565    rc = camera_open((uint8_t)cameraId, &cameraHandle);
6566    if (rc) {
6567        LOGE("camera_open failed. rc = %d", rc);
6568        goto open_failed;
6569    }
6570    if (!cameraHandle) {
6571        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6572        goto open_failed;
6573    }
6574
6575    capabilityHeap = new QCamera3HeapMemory(1);
6576    if (capabilityHeap == NULL) {
6577        LOGE("creation of capabilityHeap failed");
6578        goto heap_creation_failed;
6579    }
6580    /* Allocate memory for capability buffer */
6581    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6582    if(rc != OK) {
6583        LOGE("No memory for cappability");
6584        goto allocate_failed;
6585    }
6586
6587    /* Map memory for capability buffer */
6588    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6589    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6590                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
6591                                capabilityHeap->getFd(0),
6592                                sizeof(cam_capability_t),
6593                                capabilityHeap->getPtr(0));
6594    if(rc < 0) {
6595        LOGE("failed to map capability buffer");
6596        goto map_failed;
6597    }
6598
6599    /* Query Capability */
6600    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6601    if(rc < 0) {
6602        LOGE("failed to query capability");
6603        goto query_failed;
6604    }
6605    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6606    if (!gCamCapability[cameraId]) {
6607        LOGE("out of memory");
6608        goto query_failed;
6609    }
6610    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6611                                        sizeof(cam_capability_t));
6612
6613    int index;
6614    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6615        cam_analysis_info_t *p_analysis_info =
6616                &gCamCapability[cameraId]->analysis_info[index];
6617        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6618        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6619    }
6620    rc = 0;
6621
6622query_failed:
6623    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6624                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
6625map_failed:
6626    capabilityHeap->deallocate();
6627allocate_failed:
6628    delete capabilityHeap;
6629heap_creation_failed:
6630    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6631    cameraHandle = NULL;
6632open_failed:
6633    return rc;
6634}
6635
6636/*==========================================================================
6637 * FUNCTION   : get3Aversion
6638 *
6639 * DESCRIPTION: get the Q3A S/W version
6640 *
6641 * PARAMETERS :
6642 *  @sw_version: Reference of Q3A structure which will hold version info upon
6643 *               return
6644 *
6645 * RETURN     : None
6646 *
6647 *==========================================================================*/
6648void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6649{
6650    if(gCamCapability[mCameraId])
6651        sw_version = gCamCapability[mCameraId]->q3a_version;
6652    else
6653        LOGE("Capability structure NULL!");
6654}
6655
6656
6657/*===========================================================================
6658 * FUNCTION   : initParameters
6659 *
6660 * DESCRIPTION: initialize camera parameters
6661 *
6662 * PARAMETERS :
6663 *
6664 * RETURN     : int32_t type of status
6665 *              NO_ERROR  -- success
6666 *              none-zero failure code
6667 *==========================================================================*/
6668int QCamera3HardwareInterface::initParameters()
6669{
6670    int rc = 0;
6671
6672    //Allocate Set Param Buffer
6673    mParamHeap = new QCamera3HeapMemory(1);
6674    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6675    if(rc != OK) {
6676        rc = NO_MEMORY;
6677        LOGE("Failed to allocate SETPARM Heap memory");
6678        delete mParamHeap;
6679        mParamHeap = NULL;
6680        return rc;
6681    }
6682
6683    //Map memory for parameters buffer
6684    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6685            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6686            mParamHeap->getFd(0),
6687            sizeof(metadata_buffer_t),
6688            (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6689    if(rc < 0) {
6690        LOGE("failed to map SETPARM buffer");
6691        rc = FAILED_TRANSACTION;
6692        mParamHeap->deallocate();
6693        delete mParamHeap;
6694        mParamHeap = NULL;
6695        return rc;
6696    }
6697
6698    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6699
6700    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6701    return rc;
6702}
6703
6704/*===========================================================================
6705 * FUNCTION   : deinitParameters
6706 *
6707 * DESCRIPTION: de-initialize camera parameters
6708 *
6709 * PARAMETERS :
6710 *
6711 * RETURN     : NONE
6712 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the camera daemon before releasing
    // the backing heap memory; the order matters here.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters aliased storage inside mParamHeap (see initParameters),
    // so only the pointer needs clearing — no separate free.
    mParameters = NULL;

    // mPrevParameters was malloc'd separately in initParameters.
    free(mPrevParameters);
    mPrevParameters = NULL;
}
6727
6728/*===========================================================================
6729 * FUNCTION   : calcMaxJpegSize
6730 *
6731 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6732 *
6733 * PARAMETERS :
6734 *
6735 * RETURN     : max_jpeg_size
6736 *==========================================================================*/
6737size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6738{
6739    size_t max_jpeg_size = 0;
6740    size_t temp_width, temp_height;
6741    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6742            MAX_SIZES_CNT);
6743    for (size_t i = 0; i < count; i++) {
6744        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6745        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6746        if (temp_width * temp_height > max_jpeg_size ) {
6747            max_jpeg_size = temp_width * temp_height;
6748        }
6749    }
6750    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6751    return max_jpeg_size;
6752}
6753
6754/*===========================================================================
6755 * FUNCTION   : getMaxRawSize
6756 *
6757 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6758 *
6759 * PARAMETERS :
6760 *
6761 * RETURN     : Largest supported Raw Dimension
6762 *==========================================================================*/
6763cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6764{
6765    int max_width = 0;
6766    cam_dimension_t maxRawSize;
6767
6768    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6769    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6770        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6771            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6772            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6773        }
6774    }
6775    return maxRawSize;
6776}
6777
6778
6779/*===========================================================================
6780 * FUNCTION   : calcMaxJpegDim
6781 *
6782 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6783 *
6784 * PARAMETERS :
6785 *
6786 * RETURN     : max_jpeg_dim
6787 *==========================================================================*/
6788cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6789{
6790    cam_dimension_t max_jpeg_dim;
6791    cam_dimension_t curr_jpeg_dim;
6792    max_jpeg_dim.width = 0;
6793    max_jpeg_dim.height = 0;
6794    curr_jpeg_dim.width = 0;
6795    curr_jpeg_dim.height = 0;
6796    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6797        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6798        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6799        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6800            max_jpeg_dim.width * max_jpeg_dim.height ) {
6801            max_jpeg_dim.width = curr_jpeg_dim.width;
6802            max_jpeg_dim.height = curr_jpeg_dim.height;
6803        }
6804    }
6805    return max_jpeg_dim;
6806}
6807
6808/*===========================================================================
6809 * FUNCTION   : addStreamConfig
6810 *
6811 * DESCRIPTION: adds the stream configuration to the array
6812 *
6813 * PARAMETERS :
6814 * @available_stream_configs : pointer to stream configuration array
6815 * @scalar_format            : scalar format
6816 * @dim                      : configuration dimension
6817 * @config_type              : input or output configuration type
6818 *
6819 * RETURN     : NONE
6820 *==========================================================================*/
6821void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6822        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6823{
6824    available_stream_configs.add(scalar_format);
6825    available_stream_configs.add(dim.width);
6826    available_stream_configs.add(dim.height);
6827    available_stream_configs.add(config_type);
6828}
6829
6830/*===========================================================================
6831 * FUNCTION   : suppportBurstCapture
6832 *
6833 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6834 *
6835 * PARAMETERS :
6836 *   @cameraId  : camera Id
6837 *
6838 * RETURN     : true if camera supports BURST_CAPTURE
6839 *              false otherwise
6840 *==========================================================================*/
6841bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6842{
6843    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6844    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6845    const int32_t highResWidth = 3264;
6846    const int32_t highResHeight = 2448;
6847
6848    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6849        // Maximum resolution images cannot be captured at >= 10fps
6850        // -> not supporting BURST_CAPTURE
6851        return false;
6852    }
6853
6854    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6855        // Maximum resolution images can be captured at >= 20fps
6856        // --> supporting BURST_CAPTURE
6857        return true;
6858    }
6859
6860    // Find the smallest highRes resolution, or largest resolution if there is none
6861    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6862            MAX_SIZES_CNT);
6863    size_t highRes = 0;
6864    while ((highRes + 1 < totalCnt) &&
6865            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6866            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6867            highResWidth * highResHeight)) {
6868        highRes++;
6869    }
6870    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6871        return true;
6872    } else {
6873        return false;
6874    }
6875}
6876
6877/*===========================================================================
6878 * FUNCTION   : initStaticMetadata
6879 *
6880 * DESCRIPTION: initialize the static metadata
6881 *
6882 * PARAMETERS :
6883 *   @cameraId  : camera Id
6884 *
6885 * RETURN     : int32_t type of status
6886 *              0  -- success
6887 *              non-zero failure code
6888 *==========================================================================*/
6889int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6890{
6891    int rc = 0;
6892    CameraMetadata staticInfo;
6893    size_t count = 0;
6894    bool limitedDevice = false;
6895    char prop[PROPERTY_VALUE_MAX];
6896    bool supportBurst = false;
6897
6898    supportBurst = supportBurstCapture(cameraId);
6899
6900    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6901     * guaranteed or if min fps of max resolution is less than 20 fps, its
6902     * advertised as limited device*/
6903    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6904            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6905            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6906            !supportBurst;
6907
6908    uint8_t supportedHwLvl = limitedDevice ?
6909            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6910            // LEVEL_3 - This device will support level 3.
6911            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6912
6913    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6914            &supportedHwLvl, 1);
6915
6916    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6917    /*HAL 3 only*/
6918    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6919                    &gCamCapability[cameraId]->min_focus_distance, 1);
6920
6921    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6922                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6923
6924    /*should be using focal lengths but sensor doesn't provide that info now*/
6925    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6926                      &gCamCapability[cameraId]->focal_length,
6927                      1);
6928
6929    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6930            gCamCapability[cameraId]->apertures,
6931            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6932
6933    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6934            gCamCapability[cameraId]->filter_densities,
6935            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6936
6937
6938    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6939            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6940            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6941
6942    int32_t lens_shading_map_size[] = {
6943            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6944            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6945    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6946                      lens_shading_map_size,
6947                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6948
6949    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6950            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6951
6952    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6953            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6954
6955    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6956            &gCamCapability[cameraId]->max_frame_duration, 1);
6957
6958    camera_metadata_rational baseGainFactor = {
6959            gCamCapability[cameraId]->base_gain_factor.numerator,
6960            gCamCapability[cameraId]->base_gain_factor.denominator};
6961    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6962                      &baseGainFactor, 1);
6963
6964    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6965                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6966
6967    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6968            gCamCapability[cameraId]->pixel_array_size.height};
6969    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6970                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6971
6972    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6973            gCamCapability[cameraId]->active_array_size.top,
6974            gCamCapability[cameraId]->active_array_size.width,
6975            gCamCapability[cameraId]->active_array_size.height};
6976    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6977            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6978
6979    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6980            &gCamCapability[cameraId]->white_level, 1);
6981
6982    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
6983    adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
6984            gCamCapability[cameraId]->color_arrangement);
6985    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6986            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
6987
6988    bool hasBlackRegions = false;
6989    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6990        LOGW("black_region_count: %d is bounded to %d",
6991            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6992        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6993    }
6994    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
        // One optical black region is described by 4 int32 values; flatten
        // region_count regions from the capability table before publishing.
        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
        }
        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
        hasBlackRegions = true;
    }

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
            &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    // Sensor timestamps are advertised as coming from the REALTIME source.
    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
            &timestampSource, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
            &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {
            gCamCapability[cameraId]->sharpness_map_size.width,
            gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);

    // Pixel formats exposed to the framework. The list mixes
    // ANDROID_SCALER_AVAILABLE_FORMATS_* and HAL_PIXEL_FORMAT_* constants;
    // both are plain int32 format codes.
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_RAW10,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    // The dimension tables below are published flattened as (width, height)
    // pairs, hence count * 2 int32 entries per tag.
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
            count, MAX_SIZES_CNT, available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            available_processed_sizes, count * 2);

    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->raw_dim,
            count, MAX_SIZES_CNT, available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            available_raw_sizes, count * 2);

    // FPS ranges are likewise flattened to (min, max) pairs.
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
            count, MAX_SIZES_CNT, available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, count * 2);

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);
7068
    // Video stabilization: OFF is always advertised; EIS (ON) is added only
    // for the back camera and only when persist.camera.eis.enable is set.
    Vector<uint8_t> availableVstabModes;
    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    char eis_prop[PROPERTY_VALUE_MAX];
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "0");
    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
    if (facingBack && eis_prop_set) {
        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes.array(), availableVstabModes.size());

    /*HAL 1 and HAL 3 common*/
    // Fixed 4x max digital zoom, shared with the HAL1 implementation.
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    // Max metering regions as {AE, AWB, AF}; AF regions are disabled when the
    // sensor supports only a single focus mode.
    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
        max3aRegions[2] = 0; /* AF not supported */
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);

    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "1");
    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
    LOGD("Support face detection mode: %d",
             supportedFaceDetectMode);

    // Translate the property value into the advertised face-detect mode list;
    // any value outside 1..3 leaves only OFF and forces max face count to 0.
    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    Vector<uint8_t> availableFaceDetectModes;
    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
    if (supportedFaceDetectMode == 1) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
    } else if (supportedFaceDetectMode == 2) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else if (supportedFaceDetectMode == 3) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else {
        maxFaces = 0;
    }
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes.array(),
            availableFaceDetectModes.size());
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            (int32_t *)&maxFaces, 1);

    int32_t exposureCompensationRange[] = {
            gCamCapability[cameraId]->exposure_compensation_min,
            gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7135
    /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    // Entries are appended via addStreamConfig() as
    // (format, width, height, direction) tuples.
    Vector<int32_t> available_stream_configs;
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        // RAW formats: output sizes come from the raw dimension table.
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        // BLOB: output sizes come from the picture size table.
        case HAL_PIXEL_FORMAT_BLOB:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        // YUV / implementation-defined: all picture sizes as outputs, plus the
        // largest picture size is also advertised as an INPUT (reprocessing).
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* Book keep largest */
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                 addStreamConfig(available_stream_configs, scalar_formats[j],
                         largest_picture_size,
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    // Default hot pixel correction: FAST, with the hot pixel map disabled.
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7199
    /* android.scaler.availableMinFrameDurations */
    // Each entry is a 4-tuple of int64: (format, width, height, min duration).
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        // RAW formats pair raw_dim[] sizes with raw_min_duration[].
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        // All other formats pair picture sizes with picture_min_duration[].
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());

    // Map each entry of the HFR capability table to a numeric frame rate.
    // Unhandled modes leave fps at 0 and are filtered out by the
    // MIN_FPS_FOR_BATCH_MODE check below.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                j < MAX_SIZES_CNT; j++) {
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
       }
    }
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // Publish only when the property allows it and at least one config exists.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }

    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);
7308
    // Translate each HAL effect enum to its framework value via
    // EFFECT_MODES_MAP; entries without a framework mapping are skipped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Scene modes: CAM_SCENE_MODE_OFF is excluded; supported_indexes keeps the
    // original HAL table index for each advertised mode so the overrides list
    // below can be built from the matching rows.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Three override values (AE/AWB/AF, per makeOverridesList) per scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    // NOTE(review): when supported_scene_modes_cnt was 0, the overrides array
    // above was filled for zero modes, yet 3 values are still published below
    // after the count is bumped to 1 — verify scene_mode_overrides[0..2] is
    // well-defined in that case (and AVAILABLE_SCENE_MODES was already
    // published with zero entries before this fallback).
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        //  If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
7438
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // AE modes from the capability table, plus the two flash-assisted modes
    // when a flash unit is present (relies on AVAILABLE_TRUE being non-zero).
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Max simultaneous output streams as {stalling, processed, raw}.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No controllable LEDs: publish the tag with a zero-length entry.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Focus distance calibration is published only when the HAL value maps to
    // a framework enum.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);
7524
    // Pipeline depth = in-flight requests plus the fixed pipeline delays.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);

    // Device capabilities. BURST_CAPTURE is conditional on supportBurst,
    // CONSTRAINED_HIGH_SPEED_VIDEO on the HFR configs published earlier, and
    // RAW on the sensor not being a YUV sensor.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7602
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));

    // Reference illuminants are published only when the HAL value maps to a
    // framework enum; 'val' was declared earlier in this function.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // The capability matrices are stored as arrays of rationals; the casts
    // reinterpret them as camera_metadata_rational_t for publication.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    // Base list of capture-request keys supported by this HAL.
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7698       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7699       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7700       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7701       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7702       ANDROID_JPEG_GPS_COORDINATES,
7703       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7704       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7705       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7706       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7707       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7708       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7709       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7710       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7711       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7712       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7713       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7714       ANDROID_STATISTICS_FACE_DETECT_MODE,
7715       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7716       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7717       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7718       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7719       /* DevCamDebug metadata request_keys_basic */
7720       DEVCAMDEBUG_META_ENABLE,
7721       /* DevCamDebug metadata end */
7722       };
7723
7724    size_t request_keys_cnt =
7725            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7726    Vector<int32_t> available_request_keys;
7727    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7728    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7729        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7730    }
7731
7732    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7733            available_request_keys.array(), available_request_keys.size());
7734
7735    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7736       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7737       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7738       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7739       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7740       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7741       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7742       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7743       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7744       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7745       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7746       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7747       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7748       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7749       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7750       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7751       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7752       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7753       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7754       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7755       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7756       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7757       ANDROID_STATISTICS_FACE_SCORES,
7758       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7759       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
7760       // DevCamDebug metadata result_keys_basic
7761       DEVCAMDEBUG_META_ENABLE,
7762       // DevCamDebug metadata result_keys AF
7763       DEVCAMDEBUG_AF_LENS_POSITION,
7764       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
7765       DEVCAMDEBUG_AF_TOF_DISTANCE,
7766       DEVCAMDEBUG_AF_LUMA,
7767       DEVCAMDEBUG_AF_HAF_STATE,
7768       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7769       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7770       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7771       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7772       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7773       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7774       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7775       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7776       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7777       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7778       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7779       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7780       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7781       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7782       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7783       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7784       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7785       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7786       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7787       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7788       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7789       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7790       // DevCamDebug metadata result_keys AEC
7791       DEVCAMDEBUG_AEC_TARGET_LUMA,
7792       DEVCAMDEBUG_AEC_COMP_LUMA,
7793       DEVCAMDEBUG_AEC_AVG_LUMA,
7794       DEVCAMDEBUG_AEC_CUR_LUMA,
7795       DEVCAMDEBUG_AEC_LINECOUNT,
7796       DEVCAMDEBUG_AEC_REAL_GAIN,
7797       DEVCAMDEBUG_AEC_EXP_INDEX,
7798       DEVCAMDEBUG_AEC_LUX_IDX,
7799       // DevCamDebug metadata result_keys AWB
7800       DEVCAMDEBUG_AWB_R_GAIN,
7801       DEVCAMDEBUG_AWB_G_GAIN,
7802       DEVCAMDEBUG_AWB_B_GAIN,
7803       DEVCAMDEBUG_AWB_CCT,
7804       DEVCAMDEBUG_AWB_DECISION,
7805       /* DevCamDebug metadata end */
7806       };
7807    size_t result_keys_cnt =
7808            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7809
7810    Vector<int32_t> available_result_keys;
7811    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7812    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7813        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7814    }
7815    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7816        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7817        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7818    }
7819    if (supportedFaceDetectMode == 1) {
7820        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7821        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7822    } else if ((supportedFaceDetectMode == 2) ||
7823            (supportedFaceDetectMode == 3)) {
7824        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7825        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7826    }
7827    if (hasBlackRegions) {
7828        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
7829        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
7830    }
7831    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7832            available_result_keys.array(), available_result_keys.size());
7833
7834    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7835       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7836       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7837       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7838       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7839       ANDROID_SCALER_CROPPING_TYPE,
7840       ANDROID_SYNC_MAX_LATENCY,
7841       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7842       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7843       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7844       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7845       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7846       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7847       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7848       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7849       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7850       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7851       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7852       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7853       ANDROID_LENS_FACING,
7854       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7855       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7856       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7857       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7858       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7859       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7860       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7861       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7862       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7863       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7864       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7865       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7866       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7867       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7868       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7869       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7870       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7871       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7872       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7873       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7874       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7875       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7876       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7877       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7878       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7879       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7880       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7881       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7882       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7883       ANDROID_CONTROL_AVAILABLE_MODES,
7884       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7885       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7886       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7887       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7888       ANDROID_SHADING_AVAILABLE_MODES,
7889       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7890       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7891
7892    Vector<int32_t> available_characteristics_keys;
7893    available_characteristics_keys.appendArray(characteristics_keys_basic,
7894            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7895    if (hasBlackRegions) {
7896        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7897    }
7898    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7899                      available_characteristics_keys.array(),
7900                      available_characteristics_keys.size());
7901
7902    /*available stall durations depend on the hw + sw and will be different for different devices */
7903    /*have to add for raw after implementation*/
7904    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7905    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7906
7907    Vector<int64_t> available_stall_durations;
7908    for (uint32_t j = 0; j < stall_formats_count; j++) {
7909        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7910            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7911                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7912                available_stall_durations.add(stall_formats[j]);
7913                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7914                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7915                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7916          }
7917        } else {
7918            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7919                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7920                available_stall_durations.add(stall_formats[j]);
7921                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7922                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7923                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7924            }
7925        }
7926    }
7927    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7928                      available_stall_durations.array(),
7929                      available_stall_durations.size());
7930
7931    //QCAMERA3_OPAQUE_RAW
7932    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7933    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7934    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7935    case LEGACY_RAW:
7936        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7937            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7938        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7939            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7940        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7941            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7942        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7943        break;
7944    case MIPI_RAW:
7945        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7946            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7947        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7948            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7949        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7950            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7951        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7952        break;
7953    default:
7954        LOGE("unknown opaque_raw_format %d",
7955                gCamCapability[cameraId]->opaque_raw_fmt);
7956        break;
7957    }
7958    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7959
7960    Vector<int32_t> strides;
7961    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7962            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7963        cam_stream_buf_plane_info_t buf_planes;
7964        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7965        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7966        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7967            &gCamCapability[cameraId]->padding_info, &buf_planes);
7968        strides.add(buf_planes.plane_info.mp[0].stride);
7969    }
7970    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7971            strides.size());
7972
7973    Vector<int32_t> opaque_size;
7974    for (size_t j = 0; j < scalar_formats_count; j++) {
7975        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7976            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7977                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7978                cam_stream_buf_plane_info_t buf_planes;
7979
7980                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7981                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7982
7983                if (rc == 0) {
7984                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7985                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7986                    opaque_size.add(buf_planes.plane_info.frame_len);
7987                }else {
7988                    LOGE("raw frame calculation failed!");
7989                }
7990            }
7991        }
7992    }
7993
7994    if ((opaque_size.size() > 0) &&
7995            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7996        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7997    else
7998        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7999
8000    gStaticMetadata[cameraId] = staticInfo.release();
8001    return rc;
8002}
8003
8004/*===========================================================================
8005 * FUNCTION   : makeTable
8006 *
8007 * DESCRIPTION: make a table of sizes
8008 *
 * PARAMETERS :
 *   @dimTable  : source array of dimensions
 *   @size      : number of entries in dimTable
 *   @max_size  : maximum number of entries to copy
 *   @sizeTable : output array receiving flattened (width, height) pairs
 *
8012 *==========================================================================*/
8013void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8014        size_t max_size, int32_t *sizeTable)
8015{
8016    size_t j = 0;
8017    if (size > max_size) {
8018       size = max_size;
8019    }
8020    for (size_t i = 0; i < size; i++) {
8021        sizeTable[j] = dimTable[i].width;
8022        sizeTable[j+1] = dimTable[i].height;
8023        j+=2;
8024    }
8025}
8026
8027/*===========================================================================
8028 * FUNCTION   : makeFPSTable
8029 *
8030 * DESCRIPTION: make a table of fps ranges
8031 *
8032 * PARAMETERS :
8033 *
8034 *==========================================================================*/
8035void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8036        size_t max_size, int32_t *fpsRangesTable)
8037{
8038    size_t j = 0;
8039    if (size > max_size) {
8040       size = max_size;
8041    }
8042    for (size_t i = 0; i < size; i++) {
8043        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8044        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8045        j+=2;
8046    }
8047}
8048
8049/*===========================================================================
8050 * FUNCTION   : makeOverridesList
8051 *
8052 * DESCRIPTION: make a list of scene mode overrides
8053 *
8054 * PARAMETERS :
8055 *
8056 *
8057 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Each output entry is a triple: (AE mode, AWB mode, AF mode), so j
    // advances by 3 per scene mode.
    size_t j = 0;
    if (size > max_size) {
       size = max_size;
    }
    // Clamp the focus-mode count to the capability table's advertised count.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th framework scene mode back to its
        // row in the daemon's full overrides table.
        size_t index = supported_indexes[i];
        // AE override: auto-flash when the sensor has a flash unit, plain ON
        // otherwise.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL awb mode to the framework enum.
        // NOTE(review): on lookup failure overridesList[j+1] is left
        // untouched — callers appear to rely on the buffer's prior contents;
        // confirm the output buffer is pre-initialized.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only report the daemon's af_mode if the sensor
        // actually supports that focus mode; otherwise fall back to OFF.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
8102
8103/*===========================================================================
8104 * FUNCTION   : filterJpegSizes
8105 *
8106 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8107 *              could be downscaled to
8108 *
8109 * PARAMETERS :
8110 *
8111 * RETURN     : length of jpegSizes array
8112 *==========================================================================*/
8113
8114size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8115        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8116        uint8_t downscale_factor)
8117{
8118    if (0 == downscale_factor) {
8119        downscale_factor = 1;
8120    }
8121
8122    int32_t min_width = active_array_size.width / downscale_factor;
8123    int32_t min_height = active_array_size.height / downscale_factor;
8124    size_t jpegSizesCnt = 0;
8125    if (processedSizesCnt > maxCount) {
8126        processedSizesCnt = maxCount;
8127    }
8128    for (size_t i = 0; i < processedSizesCnt; i+=2) {
8129        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8130            jpegSizes[jpegSizesCnt] = processedSizes[i];
8131            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8132            jpegSizesCnt += 2;
8133        }
8134    }
8135    return jpegSizesCnt;
8136}
8137
8138/*===========================================================================
8139 * FUNCTION   : computeNoiseModelEntryS
8140 *
8141 * DESCRIPTION: function to map a given sensitivity to the S noise
8142 *              model parameters in the DNG noise model.
8143 *
8144 * PARAMETERS : sens : the sensor sensitivity
8145 *
 * RETURN     : S (sensor amplification) noise
8147 *
8148 *==========================================================================*/
8149double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8150    double s = gCamCapability[mCameraId]->gradient_S * sens +
8151            gCamCapability[mCameraId]->offset_S;
8152    return ((s < 0.0) ? 0.0 : s);
8153}
8154
8155/*===========================================================================
8156 * FUNCTION   : computeNoiseModelEntryO
8157 *
8158 * DESCRIPTION: function to map a given sensitivity to the O noise
8159 *              model parameters in the DNG noise model.
8160 *
8161 * PARAMETERS : sens : the sensor sensitivity
8162 *
 * RETURN     : O (sensor readout) noise
8164 *
8165 *==========================================================================*/
8166double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8167    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8168    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8169            1.0 : (1.0 * sens / max_analog_sens);
8170    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8171            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8172    return ((o < 0.0) ? 0.0 : o);
8173}
8174
8175/*===========================================================================
8176 * FUNCTION   : getSensorSensitivity
8177 *
8178 * DESCRIPTION: convert iso_mode to an integer value
8179 *
8180 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8181 *
 * RETURN     : sensitivity supported by sensor
8183 *
8184 *==========================================================================*/
8185int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8186{
8187    int32_t sensitivity;
8188
8189    switch (iso_mode) {
8190    case CAM_ISO_MODE_100:
8191        sensitivity = 100;
8192        break;
8193    case CAM_ISO_MODE_200:
8194        sensitivity = 200;
8195        break;
8196    case CAM_ISO_MODE_400:
8197        sensitivity = 400;
8198        break;
8199    case CAM_ISO_MODE_800:
8200        sensitivity = 800;
8201        break;
8202    case CAM_ISO_MODE_1600:
8203        sensitivity = 1600;
8204        break;
8205    default:
8206        sensitivity = -1;
8207        break;
8208    }
8209    return sensitivity;
8210}
8211
8212/*===========================================================================
8213 * FUNCTION   : getCamInfo
8214 *
8215 * DESCRIPTION: query camera capabilities
8216 *
8217 * PARAMETERS :
8218 *   @cameraId  : camera Id
8219 *   @info      : camera info struct to be filled in with camera capabilities
8220 *
8221 * RETURN     : int type of status
8222 *              NO_ERROR  -- success
8223 *              none-zero failure code
8224 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock guards the lazily-initialized per-camera globals
    // (gCamCapability / gStaticMetadata) below; every exit path must unlock.
    pthread_mutex_lock(&gCamLock);
    // Query capabilities from the backend only once per camera; cached in
    // gCamCapability for subsequent calls.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Likewise build the static metadata once and cache it.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the HAL position enum to the framework facing constant.
    // An unknown position sets rc = -1 but still fills in the rest of the
    // info struct before returning the error.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest advertised max_fps across all supported fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput: all processed streams at full active-array
    // resolution and peak fps, relative to the CPP's pixel bandwidth budget.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
8292
8293/*===========================================================================
8294 * FUNCTION   : translateCapabilityToMetadata
8295 *
8296 * DESCRIPTION: translate the capability into camera_metadata_t
8297 *
8298 * PARAMETERS : type of the request
8299 *
8300 *
8301 * RETURN     : success: camera_metadata_t*
8302 *              failure: NULL
8303 *
8304 *==========================================================================*/
8305camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8306{
8307    if (mDefaultMetadata[type] != NULL) {
8308        return mDefaultMetadata[type];
8309    }
8310    //first time we are handling this request
8311    //fill up the metadata structure using the wrapper class
8312    CameraMetadata settings;
8313    //translate from cam_capability_t to camera_metadata_tag_t
8314    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8315    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8316    int32_t defaultRequestID = 0;
8317    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8318
8319    /* OIS disable */
8320    char ois_prop[PROPERTY_VALUE_MAX];
8321    memset(ois_prop, 0, sizeof(ois_prop));
8322    property_get("persist.camera.ois.disable", ois_prop, "0");
8323    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8324
8325    /* Force video to use OIS */
8326    char videoOisProp[PROPERTY_VALUE_MAX];
8327    memset(videoOisProp, 0, sizeof(videoOisProp));
8328    property_get("persist.camera.ois.video", videoOisProp, "1");
8329    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8330
8331    // EIS enable/disable
8332    char eis_prop[PROPERTY_VALUE_MAX];
8333    memset(eis_prop, 0, sizeof(eis_prop));
8334    property_get("persist.camera.eis.enable", eis_prop, "0");
8335    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8336
8337    // Hybrid AE enable/disable
8338    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8339    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8340    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8341    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8342
8343    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
8344    // This is a bit hacky. EIS is enabled only when the above setprop
8345    // is set to non-zero value and on back camera (for 2015 Nexus).
8346    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
8347    // configureStream is called before this function. In other words,
8348    // we cannot guarantee the app will call configureStream before
8349    // calling createDefaultRequest.
8350    const bool eisEnabled = facingBack && eis_prop_set;
8351
8352    uint8_t controlIntent = 0;
8353    uint8_t focusMode;
8354    uint8_t vsMode;
8355    uint8_t optStabMode;
8356    uint8_t cacMode;
8357    uint8_t edge_mode;
8358    uint8_t noise_red_mode;
8359    uint8_t tonemap_mode;
8360    bool highQualityModeEntryAvailable = FALSE;
8361    bool fastModeEntryAvailable = FALSE;
8362    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8363    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8364    switch (type) {
8365      case CAMERA3_TEMPLATE_PREVIEW:
8366        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8367        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8368        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8369        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8370        edge_mode = ANDROID_EDGE_MODE_FAST;
8371        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8372        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8373        break;
8374      case CAMERA3_TEMPLATE_STILL_CAPTURE:
8375        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8376        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8377        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8378        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8379        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8380        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8381        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8382        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8383        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8384            if (gCamCapability[mCameraId]->aberration_modes[i] ==
8385                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8386                highQualityModeEntryAvailable = TRUE;
8387            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8388                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8389                fastModeEntryAvailable = TRUE;
8390            }
8391        }
8392        if (highQualityModeEntryAvailable) {
8393            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8394        } else if (fastModeEntryAvailable) {
8395            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8396        }
8397        break;
8398      case CAMERA3_TEMPLATE_VIDEO_RECORD:
8399        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8400        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8401        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8402        if (eisEnabled) {
8403            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8404        }
8405        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8406        edge_mode = ANDROID_EDGE_MODE_FAST;
8407        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8408        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8409        if (forceVideoOis)
8410            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8411        break;
8412      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8413        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8414        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8415        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8416        if (eisEnabled) {
8417            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8418        }
8419        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8420        edge_mode = ANDROID_EDGE_MODE_FAST;
8421        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8422        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8423        if (forceVideoOis)
8424            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8425        break;
8426      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8427        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8428        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8429        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8430        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8431        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8432        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8433        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8434        break;
8435      case CAMERA3_TEMPLATE_MANUAL:
8436        edge_mode = ANDROID_EDGE_MODE_FAST;
8437        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8438        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8439        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8440        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8441        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8442        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8443        break;
8444      default:
8445        edge_mode = ANDROID_EDGE_MODE_FAST;
8446        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8447        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8448        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8449        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8450        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8451        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8452        break;
8453    }
8454    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8455    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8456    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8457    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8458        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8459    }
8460    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8461
8462    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8463            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8464        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8465    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8466            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8467            || ois_disable)
8468        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8469    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8470
8471    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8472            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8473
8474    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8475    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8476
8477    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8478    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8479
8480    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8481    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8482
8483    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8484    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8485
8486    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8487    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8488
8489    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8490    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8491
8492    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8493    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8494
8495    /*flash*/
8496    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8497    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8498
8499    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8500    settings.update(ANDROID_FLASH_FIRING_POWER,
8501            &flashFiringLevel, 1);
8502
8503    /* lens */
8504    float default_aperture = gCamCapability[mCameraId]->apertures[0];
8505    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8506
8507    if (gCamCapability[mCameraId]->filter_densities_count) {
8508        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8509        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8510                        gCamCapability[mCameraId]->filter_densities_count);
8511    }
8512
8513    float default_focal_length = gCamCapability[mCameraId]->focal_length;
8514    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8515
8516    if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8517        float default_focus_distance = 0;
8518        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8519    }
8520
8521    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8522    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8523
8524    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8525    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8526
8527    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8528    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8529
8530    /* face detection (default to OFF) */
8531    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8532    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8533
8534    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8535    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8536
8537    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8538    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8539
8540    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8541    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8542
8543    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8544    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8545
8546    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8547    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8548
8549    /* Exposure time(Update the Min Exposure Time)*/
8550    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8551    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8552
8553    /* frame duration */
8554    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8555    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8556
8557    /* sensitivity */
8558    static const int32_t default_sensitivity = 100;
8559    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8560    static const int32_t default_isp_sensitivity =
8561            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8562    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8563
8564    /*edge mode*/
8565    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8566
8567    /*noise reduction mode*/
8568    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8569
8570    /*color correction mode*/
8571    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8572    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8573
8574    /*transform matrix mode*/
8575    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8576
8577    int32_t scaler_crop_region[4];
8578    scaler_crop_region[0] = 0;
8579    scaler_crop_region[1] = 0;
8580    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8581    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8582    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8583
8584    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8585    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8586
8587    /*focus distance*/
8588    float focus_distance = 0.0;
8589    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8590
8591    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8592    /* Restrict default preview template to max 30 fps */
8593    float max_range = 0.0;
8594    float max_fixed_fps = 0.0;
8595    int32_t fps_range[2] = {0, 0};
8596    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8597            i++) {
8598        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8599                TEMPLATE_MAX_PREVIEW_FPS) {
8600            continue;
8601        }
8602        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8603            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8604        if (type == CAMERA3_TEMPLATE_PREVIEW ||
8605                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8606                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8607            if (range > max_range) {
8608                fps_range[0] =
8609                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8610                fps_range[1] =
8611                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8612                max_range = range;
8613            }
8614        } else {
8615            if (range < 0.01 && max_fixed_fps <
8616                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8617                fps_range[0] =
8618                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8619                fps_range[1] =
8620                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8621                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8622            }
8623        }
8624    }
8625    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8626
8627    /*precapture trigger*/
8628    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8629    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8630
8631    /*af trigger*/
8632    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8633    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8634
8635    /* ae & af regions */
8636    int32_t active_region[] = {
8637            gCamCapability[mCameraId]->active_array_size.left,
8638            gCamCapability[mCameraId]->active_array_size.top,
8639            gCamCapability[mCameraId]->active_array_size.left +
8640                    gCamCapability[mCameraId]->active_array_size.width,
8641            gCamCapability[mCameraId]->active_array_size.top +
8642                    gCamCapability[mCameraId]->active_array_size.height,
8643            0};
8644    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8645            sizeof(active_region) / sizeof(active_region[0]));
8646    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8647            sizeof(active_region) / sizeof(active_region[0]));
8648
8649    /* black level lock */
8650    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8651    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8652
8653    /* lens shading map mode */
8654    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8655    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8656        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8657    }
8658    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8659
8660    //special defaults for manual template
8661    if (type == CAMERA3_TEMPLATE_MANUAL) {
8662        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8663        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8664
8665        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8666        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8667
8668        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8669        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8670
8671        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8672        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8673
8674        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8675        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8676
8677        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8678        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8679    }
8680
8681
8682    /* TNR
8683     * We'll use this location to determine which modes TNR will be set.
8684     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8685     * This is not to be confused with linking on a per stream basis that decision
8686     * is still on per-session basis and will be handled as part of config stream
8687     */
8688    uint8_t tnr_enable = 0;
8689
8690    if (m_bTnrPreview || m_bTnrVideo) {
8691
8692        switch (type) {
8693            case CAMERA3_TEMPLATE_VIDEO_RECORD:
8694                    tnr_enable = 1;
8695                    break;
8696
8697            default:
8698                    tnr_enable = 0;
8699                    break;
8700        }
8701
8702        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8703        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8704        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8705
8706        LOGD("TNR:%d with process plate %d for template:%d",
8707                             tnr_enable, tnr_process_type, type);
8708    }
8709
8710    //Update Link tags to default
8711    int32_t sync_type = CAM_TYPE_STANDALONE;
8712    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8713
8714    int32_t is_main = 0; //this doesn't matter as app should overwrite
8715    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8716
8717    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8718
8719    /* CDS default */
8720    char prop[PROPERTY_VALUE_MAX];
8721    memset(prop, 0, sizeof(prop));
8722    property_get("persist.camera.CDS", prop, "Auto");
8723    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8724    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8725    if (CAM_CDS_MODE_MAX == cds_mode) {
8726        cds_mode = CAM_CDS_MODE_AUTO;
8727    }
8728
8729    /* Disabling CDS in templates which have TNR enabled*/
8730    if (tnr_enable)
8731        cds_mode = CAM_CDS_MODE_OFF;
8732
8733    int32_t mode = cds_mode;
8734    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8735
8736    /* hybrid ae */
8737    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8738
8739    mDefaultMetadata[type] = settings.release();
8740
8741    return mDefaultMetadata[type];
8742}
8743
8744/*===========================================================================
8745 * FUNCTION   : setFrameParameters
8746 *
8747 * DESCRIPTION: set parameters per frame as requested in the metadata from
8748 *              framework
8749 *
8750 * PARAMETERS :
8751 *   @request   : request that needs to be serviced
8752 *   @streamsArray : Stream ID of all the requested streams
8753 *   @blob_request: Whether this request is a blob request or not
8754 *
8755 * RETURN     : success: NO_ERROR
8756 *              failure:
8757 *==========================================================================*/
8758int QCamera3HardwareInterface::setFrameParameters(
8759                    camera3_capture_request_t *request,
8760                    cam_stream_ID_t streamsArray,
8761                    int blob_request,
8762                    uint32_t snapshotStreamId)
8763{
8764    /*translate from camera_metadata_t type to parm_type_t*/
8765    int rc = 0;
8766    int32_t hal_version = CAM_HAL_V3;
8767
8768    clear_metadata_buffer(mParameters);
8769    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8770        LOGE("Failed to set hal version in the parameters");
8771        return BAD_VALUE;
8772    }
8773
8774    /*we need to update the frame number in the parameters*/
8775    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8776            request->frame_number)) {
8777        LOGE("Failed to set the frame number in the parameters");
8778        return BAD_VALUE;
8779    }
8780
8781    /* Update stream id of all the requested buffers */
8782    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
8783        LOGE("Failed to set stream type mask in the parameters");
8784        return BAD_VALUE;
8785    }
8786
8787    if (mUpdateDebugLevel) {
8788        uint32_t dummyDebugLevel = 0;
8789        /* The value of dummyDebugLevel is irrelavent. On
8790         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8791        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8792                dummyDebugLevel)) {
8793            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8794            return BAD_VALUE;
8795        }
8796        mUpdateDebugLevel = false;
8797    }
8798
8799    if(request->settings != NULL){
8800        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8801        if (blob_request)
8802            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8803    }
8804
8805    return rc;
8806}
8807
8808/*===========================================================================
8809 * FUNCTION   : setReprocParameters
8810 *
8811 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8812 *              return it.
8813 *
8814 * PARAMETERS :
8815 *   @request   : request that needs to be serviced
8816 *
8817 * RETURN     : success: NO_ERROR
8818 *              failure:
8819 *==========================================================================*/
8820int32_t QCamera3HardwareInterface::setReprocParameters(
8821        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8822        uint32_t snapshotStreamId)
8823{
8824    /*translate from camera_metadata_t type to parm_type_t*/
8825    int rc = 0;
8826
8827    if (NULL == request->settings){
8828        LOGE("Reprocess settings cannot be NULL");
8829        return BAD_VALUE;
8830    }
8831
8832    if (NULL == reprocParam) {
8833        LOGE("Invalid reprocessing metadata buffer");
8834        return BAD_VALUE;
8835    }
8836    clear_metadata_buffer(reprocParam);
8837
8838    /*we need to update the frame number in the parameters*/
8839    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8840            request->frame_number)) {
8841        LOGE("Failed to set the frame number in the parameters");
8842        return BAD_VALUE;
8843    }
8844
8845    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8846    if (rc < 0) {
8847        LOGE("Failed to translate reproc request");
8848        return rc;
8849    }
8850
8851    CameraMetadata frame_settings;
8852    frame_settings = request->settings;
8853    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8854            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8855        int32_t *crop_count =
8856                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8857        int32_t *crop_data =
8858                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8859        int32_t *roi_map =
8860                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8861        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8862            cam_crop_data_t crop_meta;
8863            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8864            crop_meta.num_of_streams = 1;
8865            crop_meta.crop_info[0].crop.left   = crop_data[0];
8866            crop_meta.crop_info[0].crop.top    = crop_data[1];
8867            crop_meta.crop_info[0].crop.width  = crop_data[2];
8868            crop_meta.crop_info[0].crop.height = crop_data[3];
8869
8870            crop_meta.crop_info[0].roi_map.left =
8871                    roi_map[0];
8872            crop_meta.crop_info[0].roi_map.top =
8873                    roi_map[1];
8874            crop_meta.crop_info[0].roi_map.width =
8875                    roi_map[2];
8876            crop_meta.crop_info[0].roi_map.height =
8877                    roi_map[3];
8878
8879            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8880                rc = BAD_VALUE;
8881            }
8882            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8883                    request->input_buffer->stream,
8884                    crop_meta.crop_info[0].crop.left,
8885                    crop_meta.crop_info[0].crop.top,
8886                    crop_meta.crop_info[0].crop.width,
8887                    crop_meta.crop_info[0].crop.height);
8888            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8889                    request->input_buffer->stream,
8890                    crop_meta.crop_info[0].roi_map.left,
8891                    crop_meta.crop_info[0].roi_map.top,
8892                    crop_meta.crop_info[0].roi_map.width,
8893                    crop_meta.crop_info[0].roi_map.height);
8894            } else {
8895                LOGE("Invalid reprocess crop count %d!", *crop_count);
8896            }
8897    } else {
8898        LOGE("No crop data from matching output stream");
8899    }
8900
8901    /* These settings are not needed for regular requests so handle them specially for
8902       reprocess requests; information needed for EXIF tags */
8903    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8904        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8905                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8906        if (NAME_NOT_FOUND != val) {
8907            uint32_t flashMode = (uint32_t)val;
8908            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8909                rc = BAD_VALUE;
8910            }
8911        } else {
8912            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8913                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8914        }
8915    } else {
8916        LOGH("No flash mode in reprocess settings");
8917    }
8918
8919    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8920        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8921        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8922            rc = BAD_VALUE;
8923        }
8924    } else {
8925        LOGH("No flash state in reprocess settings");
8926    }
8927
8928    return rc;
8929}
8930
8931/*===========================================================================
8932 * FUNCTION   : saveRequestSettings
8933 *
8934 * DESCRIPTION: Add any settings that might have changed to the request settings
8935 *              and save the settings to be applied on the frame
8936 *
8937 * PARAMETERS :
8938 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8939 *   @request      : request with initial settings
8940 *
8941 * RETURN     :
8942 * camera_metadata_t* : pointer to the saved request settings
8943 *==========================================================================*/
8944camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8945        const CameraMetadata &jpegMetadata,
8946        camera3_capture_request_t *request)
8947{
8948    camera_metadata_t *resultMetadata;
8949    CameraMetadata camMetadata;
8950    camMetadata = request->settings;
8951
8952    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8953        int32_t thumbnail_size[2];
8954        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8955        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8956        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8957                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8958    }
8959
8960    resultMetadata = camMetadata.release();
8961    return resultMetadata;
8962}
8963
8964/*===========================================================================
8965 * FUNCTION   : setHalFpsRange
8966 *
8967 * DESCRIPTION: set FPS range parameter
8968 *
8969 *
8970 * PARAMETERS :
8971 *   @settings    : Metadata from framework
8972 *   @hal_metadata: Metadata buffer
8973 *
8974 *
8975 * RETURN     : success: NO_ERROR
8976 *              failure:
8977 *==========================================================================*/
8978int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
8979        metadata_buffer_t *hal_metadata)
8980{
8981    int32_t rc = NO_ERROR;
8982    cam_fps_range_t fps_range;
8983    fps_range.min_fps = (float)
8984            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
8985    fps_range.max_fps = (float)
8986            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
8987    fps_range.video_min_fps = fps_range.min_fps;
8988    fps_range.video_max_fps = fps_range.max_fps;
8989
8990    LOGD("aeTargetFpsRange fps: [%f %f]",
8991            fps_range.min_fps, fps_range.max_fps);
8992    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
8993     * follows:
8994     * ---------------------------------------------------------------|
8995     *      Video stream is absent in configure_streams               |
8996     *    (Camcorder preview before the first video record            |
8997     * ---------------------------------------------------------------|
8998     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
8999     *                   |             |             | vid_min/max_fps|
9000     * ---------------------------------------------------------------|
9001     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
9002     *                   |-------------|-------------|----------------|
9003     *                   |  [240, 240] |     240     |  [240, 240]    |
9004     * ---------------------------------------------------------------|
9005     *     Video stream is present in configure_streams               |
9006     * ---------------------------------------------------------------|
9007     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
9008     *                   |             |             | vid_min/max_fps|
9009     * ---------------------------------------------------------------|
9010     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
9011     * (camcorder prev   |-------------|-------------|----------------|
9012     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
9013     *  is stopped)      |             |             |                |
9014     * ---------------------------------------------------------------|
9015     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
9016     *                   |-------------|-------------|----------------|
9017     *                   |  [240, 240] |     240     |  [240, 240]    |
9018     * ---------------------------------------------------------------|
9019     * When Video stream is absent in configure_streams,
9020     * preview fps = sensor_fps / batchsize
9021     * Eg: for 240fps at batchSize 4, preview = 60fps
9022     *     for 120fps at batchSize 4, preview = 30fps
9023     *
9024     * When video stream is present in configure_streams, preview fps is as per
9025     * the ratio of preview buffers to video buffers requested in process
9026     * capture request
9027     */
9028    mBatchSize = 0;
9029    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
9030        fps_range.min_fps = fps_range.video_max_fps;
9031        fps_range.video_min_fps = fps_range.video_max_fps;
9032        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
9033                fps_range.max_fps);
9034        if (NAME_NOT_FOUND != val) {
9035            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
9036            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
9037                return BAD_VALUE;
9038            }
9039
9040            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
9041                /* If batchmode is currently in progress and the fps changes,
9042                 * set the flag to restart the sensor */
9043                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
9044                        (mHFRVideoFps != fps_range.max_fps)) {
9045                    mNeedSensorRestart = true;
9046                }
9047                mHFRVideoFps = fps_range.max_fps;
9048                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
9049                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
9050                    mBatchSize = MAX_HFR_BATCH_SIZE;
9051                }
9052             }
9053            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
9054
9055         }
9056    } else {
9057        /* HFR mode is session param in backend/ISP. This should be reset when
9058         * in non-HFR mode  */
9059        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
9060        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
9061            return BAD_VALUE;
9062        }
9063    }
9064    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
9065        return BAD_VALUE;
9066    }
9067    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
9068            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
9069    return rc;
9070}
9071
9072/*===========================================================================
9073 * FUNCTION   : translateToHalMetadata
9074 *
9075 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9076 *
9077 *
9078 * PARAMETERS :
9079 *   @request  : request sent from framework
9080 *
9081 *
9082 * RETURN     : success: NO_ERROR
9083 *              failure:
9084 *==========================================================================*/
9085int QCamera3HardwareInterface::translateToHalMetadata
9086                                  (const camera3_capture_request_t *request,
9087                                   metadata_buffer_t *hal_metadata,
9088                                   uint32_t snapshotStreamId)
9089{
9090    int rc = 0;
9091    CameraMetadata frame_settings;
9092    frame_settings = request->settings;
9093
9094    /* Do not change the order of the following list unless you know what you are
9095     * doing.
9096     * The order is laid out in such a way that parameters in the front of the table
9097     * may be used to override the parameters later in the table. Examples are:
9098     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
9102     */
9103    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9104        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9105        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9106            rc = BAD_VALUE;
9107        }
9108        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9109        if (rc != NO_ERROR) {
9110            LOGE("extractSceneMode failed");
9111        }
9112    }
9113
9114    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9115        uint8_t fwk_aeMode =
9116            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9117        uint8_t aeMode;
9118        int32_t redeye;
9119
9120        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9121            aeMode = CAM_AE_MODE_OFF;
9122        } else {
9123            aeMode = CAM_AE_MODE_ON;
9124        }
9125        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9126            redeye = 1;
9127        } else {
9128            redeye = 0;
9129        }
9130
9131        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9132                fwk_aeMode);
9133        if (NAME_NOT_FOUND != val) {
9134            int32_t flashMode = (int32_t)val;
9135            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9136        }
9137
9138        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9139        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9140            rc = BAD_VALUE;
9141        }
9142    }
9143
9144    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9145        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9146        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9147                fwk_whiteLevel);
9148        if (NAME_NOT_FOUND != val) {
9149            uint8_t whiteLevel = (uint8_t)val;
9150            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9151                rc = BAD_VALUE;
9152            }
9153        }
9154    }
9155
9156    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9157        uint8_t fwk_cacMode =
9158                frame_settings.find(
9159                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9160        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9161                fwk_cacMode);
9162        if (NAME_NOT_FOUND != val) {
9163            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9164            bool entryAvailable = FALSE;
9165            // Check whether Frameworks set CAC mode is supported in device or not
9166            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9167                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9168                    entryAvailable = TRUE;
9169                    break;
9170                }
9171            }
9172            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9173            // If entry not found then set the device supported mode instead of frameworks mode i.e,
9174            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9175            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9176            if (entryAvailable == FALSE) {
9177                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9178                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9179                } else {
9180                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9181                        // High is not supported and so set the FAST as spec say's underlying
9182                        // device implementation can be the same for both modes.
9183                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9184                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9185                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9186                        // in order to avoid the fps drop due to high quality
9187                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9188                    } else {
9189                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9190                    }
9191                }
9192            }
9193            LOGD("Final cacMode is %d", cacMode);
9194            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9195                rc = BAD_VALUE;
9196            }
9197        } else {
9198            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9199        }
9200    }
9201
9202    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9203        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9204        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9205                fwk_focusMode);
9206        if (NAME_NOT_FOUND != val) {
9207            uint8_t focusMode = (uint8_t)val;
9208            LOGD("set focus mode %d", focusMode);
9209            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9210                rc = BAD_VALUE;
9211            }
9212        }
9213    }
9214
9215    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9216        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9217        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9218                focalDistance)) {
9219            rc = BAD_VALUE;
9220        }
9221    }
9222
9223    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9224        uint8_t fwk_antibandingMode =
9225                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9226        int val = lookupHalName(ANTIBANDING_MODES_MAP,
9227                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9228        if (NAME_NOT_FOUND != val) {
9229            uint32_t hal_antibandingMode = (uint32_t)val;
9230            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9231                    hal_antibandingMode)) {
9232                rc = BAD_VALUE;
9233            }
9234        }
9235    }
9236
9237    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9238        int32_t expCompensation = frame_settings.find(
9239                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9240        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9241            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9242        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9243            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9244        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9245                expCompensation)) {
9246            rc = BAD_VALUE;
9247        }
9248    }
9249
9250    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9251        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9252        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9253            rc = BAD_VALUE;
9254        }
9255    }
9256    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9257        rc = setHalFpsRange(frame_settings, hal_metadata);
9258        if (rc != NO_ERROR) {
9259            LOGE("setHalFpsRange failed");
9260        }
9261    }
9262
9263    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9264        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9265        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9266            rc = BAD_VALUE;
9267        }
9268    }
9269
9270    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9271        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9272        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9273                fwk_effectMode);
9274        if (NAME_NOT_FOUND != val) {
9275            uint8_t effectMode = (uint8_t)val;
9276            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9277                rc = BAD_VALUE;
9278            }
9279        }
9280    }
9281
9282    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9283        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9284        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9285                colorCorrectMode)) {
9286            rc = BAD_VALUE;
9287        }
9288    }
9289
9290    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9291        cam_color_correct_gains_t colorCorrectGains;
9292        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9293            colorCorrectGains.gains[i] =
9294                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9295        }
9296        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9297                colorCorrectGains)) {
9298            rc = BAD_VALUE;
9299        }
9300    }
9301
9302    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9303        cam_color_correct_matrix_t colorCorrectTransform;
9304        cam_rational_type_t transform_elem;
9305        size_t num = 0;
9306        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9307           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9308              transform_elem.numerator =
9309                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9310              transform_elem.denominator =
9311                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9312              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9313              num++;
9314           }
9315        }
9316        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9317                colorCorrectTransform)) {
9318            rc = BAD_VALUE;
9319        }
9320    }
9321
9322    cam_trigger_t aecTrigger;
9323    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9324    aecTrigger.trigger_id = -1;
9325    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9326        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9327        aecTrigger.trigger =
9328            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9329        aecTrigger.trigger_id =
9330            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9331        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9332                aecTrigger)) {
9333            rc = BAD_VALUE;
9334        }
9335        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9336                aecTrigger.trigger, aecTrigger.trigger_id);
9337    }
9338
9339    /*af_trigger must come with a trigger id*/
9340    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9341        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9342        cam_trigger_t af_trigger;
9343        af_trigger.trigger =
9344            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9345        af_trigger.trigger_id =
9346            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9347        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9348            rc = BAD_VALUE;
9349        }
9350        LOGD("AfTrigger: %d AfTriggerID: %d",
9351                af_trigger.trigger, af_trigger.trigger_id);
9352    }
9353
9354    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9355        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9356        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9357            rc = BAD_VALUE;
9358        }
9359    }
9360    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9361        cam_edge_application_t edge_application;
9362        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9363        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9364            edge_application.sharpness = 0;
9365        } else {
9366            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9367        }
9368        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9369            rc = BAD_VALUE;
9370        }
9371    }
9372
9373    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9374        int32_t respectFlashMode = 1;
9375        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9376            uint8_t fwk_aeMode =
9377                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9378            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9379                respectFlashMode = 0;
9380                LOGH("AE Mode controls flash, ignore android.flash.mode");
9381            }
9382        }
9383        if (respectFlashMode) {
9384            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9385                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9386            LOGH("flash mode after mapping %d", val);
9387            // To check: CAM_INTF_META_FLASH_MODE usage
9388            if (NAME_NOT_FOUND != val) {
9389                uint8_t flashMode = (uint8_t)val;
9390                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9391                    rc = BAD_VALUE;
9392                }
9393            }
9394        }
9395    }
9396
9397    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9398        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9399        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9400            rc = BAD_VALUE;
9401        }
9402    }
9403
9404    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9405        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9406        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9407                flashFiringTime)) {
9408            rc = BAD_VALUE;
9409        }
9410    }
9411
9412    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9413        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9414        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9415                hotPixelMode)) {
9416            rc = BAD_VALUE;
9417        }
9418    }
9419
9420    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9421        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9422        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9423                lensAperture)) {
9424            rc = BAD_VALUE;
9425        }
9426    }
9427
9428    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9429        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9430        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9431                filterDensity)) {
9432            rc = BAD_VALUE;
9433        }
9434    }
9435
9436    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9437        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9438        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9439                focalLength)) {
9440            rc = BAD_VALUE;
9441        }
9442    }
9443
9444    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9445        uint8_t optStabMode =
9446                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9447        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9448                optStabMode)) {
9449            rc = BAD_VALUE;
9450        }
9451    }
9452
9453    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9454        uint8_t videoStabMode =
9455                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9456        LOGD("videoStabMode from APP = %d", videoStabMode);
9457        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9458                videoStabMode)) {
9459            rc = BAD_VALUE;
9460        }
9461    }
9462
9463
9464    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9465        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9466        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9467                noiseRedMode)) {
9468            rc = BAD_VALUE;
9469        }
9470    }
9471
9472    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9473        float reprocessEffectiveExposureFactor =
9474            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9475        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9476                reprocessEffectiveExposureFactor)) {
9477            rc = BAD_VALUE;
9478        }
9479    }
9480
9481    cam_crop_region_t scalerCropRegion;
9482    bool scalerCropSet = false;
9483    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9484        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9485        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9486        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9487        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9488
9489        // Map coordinate system from active array to sensor output.
9490        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9491                scalerCropRegion.width, scalerCropRegion.height);
9492
9493        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9494                scalerCropRegion)) {
9495            rc = BAD_VALUE;
9496        }
9497        scalerCropSet = true;
9498    }
9499
9500    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9501        int64_t sensorExpTime =
9502                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9503        LOGD("setting sensorExpTime %lld", sensorExpTime);
9504        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9505                sensorExpTime)) {
9506            rc = BAD_VALUE;
9507        }
9508    }
9509
9510    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9511        int64_t sensorFrameDuration =
9512                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9513        int64_t minFrameDuration = getMinFrameDuration(request);
9514        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9515        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9516            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9517        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9518        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9519                sensorFrameDuration)) {
9520            rc = BAD_VALUE;
9521        }
9522    }
9523
9524    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9525        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9526        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9527                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9528        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9529                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9530        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9531        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9532                sensorSensitivity)) {
9533            rc = BAD_VALUE;
9534        }
9535    }
9536
9537    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9538        int32_t ispSensitivity =
9539            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9540        if (ispSensitivity <
9541            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9542                ispSensitivity =
9543                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9544                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9545        }
9546        if (ispSensitivity >
9547            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9548                ispSensitivity =
9549                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9550                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9551        }
9552        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9553                ispSensitivity)) {
9554            rc = BAD_VALUE;
9555        }
9556    }
9557
9558    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9559        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9560        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9561            rc = BAD_VALUE;
9562        }
9563    }
9564
9565    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9566        uint8_t fwk_facedetectMode =
9567                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9568
9569        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9570                fwk_facedetectMode);
9571
9572        if (NAME_NOT_FOUND != val) {
9573            uint8_t facedetectMode = (uint8_t)val;
9574            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9575                    facedetectMode)) {
9576                rc = BAD_VALUE;
9577            }
9578        }
9579    }
9580
9581    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9582        uint8_t histogramMode =
9583                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9584        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9585                histogramMode)) {
9586            rc = BAD_VALUE;
9587        }
9588    }
9589
9590    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9591        uint8_t sharpnessMapMode =
9592                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9593        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9594                sharpnessMapMode)) {
9595            rc = BAD_VALUE;
9596        }
9597    }
9598
9599    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9600        uint8_t tonemapMode =
9601                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9602        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9603            rc = BAD_VALUE;
9604        }
9605    }
9606    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9607    /*All tonemap channels will have the same number of points*/
9608    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9609        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9610        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9611        cam_rgb_tonemap_curves tonemapCurves;
9612        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9613        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9614            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9615                     tonemapCurves.tonemap_points_cnt,
9616                    CAM_MAX_TONEMAP_CURVE_SIZE);
9617            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9618        }
9619
9620        /* ch0 = G*/
9621        size_t point = 0;
9622        cam_tonemap_curve_t tonemapCurveGreen;
9623        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9624            for (size_t j = 0; j < 2; j++) {
9625               tonemapCurveGreen.tonemap_points[i][j] =
9626                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9627               point++;
9628            }
9629        }
9630        tonemapCurves.curves[0] = tonemapCurveGreen;
9631
9632        /* ch 1 = B */
9633        point = 0;
9634        cam_tonemap_curve_t tonemapCurveBlue;
9635        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9636            for (size_t j = 0; j < 2; j++) {
9637               tonemapCurveBlue.tonemap_points[i][j] =
9638                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9639               point++;
9640            }
9641        }
9642        tonemapCurves.curves[1] = tonemapCurveBlue;
9643
9644        /* ch 2 = R */
9645        point = 0;
9646        cam_tonemap_curve_t tonemapCurveRed;
9647        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9648            for (size_t j = 0; j < 2; j++) {
9649               tonemapCurveRed.tonemap_points[i][j] =
9650                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9651               point++;
9652            }
9653        }
9654        tonemapCurves.curves[2] = tonemapCurveRed;
9655
9656        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9657                tonemapCurves)) {
9658            rc = BAD_VALUE;
9659        }
9660    }
9661
9662    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9663        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9664        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9665                captureIntent)) {
9666            rc = BAD_VALUE;
9667        }
9668    }
9669
9670    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9671        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9672        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9673                blackLevelLock)) {
9674            rc = BAD_VALUE;
9675        }
9676    }
9677
9678    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9679        uint8_t lensShadingMapMode =
9680                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9681        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9682                lensShadingMapMode)) {
9683            rc = BAD_VALUE;
9684        }
9685    }
9686
9687    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9688        cam_area_t roi;
9689        bool reset = true;
9690        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9691
9692        // Map coordinate system from active array to sensor output.
9693        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9694                roi.rect.height);
9695
9696        if (scalerCropSet) {
9697            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9698        }
9699        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9700            rc = BAD_VALUE;
9701        }
9702    }
9703
9704    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9705        cam_area_t roi;
9706        bool reset = true;
9707        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9708
9709        // Map coordinate system from active array to sensor output.
9710        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9711                roi.rect.height);
9712
9713        if (scalerCropSet) {
9714            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9715        }
9716        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9717            rc = BAD_VALUE;
9718        }
9719    }
9720
9721    // CDS for non-HFR non-video mode
9722    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9723            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9724        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9725        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9726            LOGE("Invalid CDS mode %d!", *fwk_cds);
9727        } else {
9728            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9729                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9730                rc = BAD_VALUE;
9731            }
9732        }
9733    }
9734
9735    // TNR
9736    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9737        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9738        uint8_t b_TnrRequested = 0;
9739        cam_denoise_param_t tnr;
9740        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9741        tnr.process_plates =
9742            (cam_denoise_process_type_t)frame_settings.find(
9743            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9744        b_TnrRequested = tnr.denoise_enable;
9745        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9746            rc = BAD_VALUE;
9747        }
9748    }
9749
9750    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9751        int32_t fwk_testPatternMode =
9752                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9753        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9754                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9755
9756        if (NAME_NOT_FOUND != testPatternMode) {
9757            cam_test_pattern_data_t testPatternData;
9758            memset(&testPatternData, 0, sizeof(testPatternData));
9759            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9760            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9761                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9762                int32_t *fwk_testPatternData =
9763                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9764                testPatternData.r = fwk_testPatternData[0];
9765                testPatternData.b = fwk_testPatternData[3];
9766                switch (gCamCapability[mCameraId]->color_arrangement) {
9767                    case CAM_FILTER_ARRANGEMENT_RGGB:
9768                    case CAM_FILTER_ARRANGEMENT_GRBG:
9769                        testPatternData.gr = fwk_testPatternData[1];
9770                        testPatternData.gb = fwk_testPatternData[2];
9771                        break;
9772                    case CAM_FILTER_ARRANGEMENT_GBRG:
9773                    case CAM_FILTER_ARRANGEMENT_BGGR:
9774                        testPatternData.gr = fwk_testPatternData[2];
9775                        testPatternData.gb = fwk_testPatternData[1];
9776                        break;
9777                    default:
9778                        LOGE("color arrangement %d is not supported",
9779                                gCamCapability[mCameraId]->color_arrangement);
9780                        break;
9781                }
9782            }
9783            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9784                    testPatternData)) {
9785                rc = BAD_VALUE;
9786            }
9787        } else {
9788            LOGE("Invalid framework sensor test pattern mode %d",
9789                    fwk_testPatternMode);
9790        }
9791    }
9792
9793    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9794        size_t count = 0;
9795        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9796        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9797                gps_coords.data.d, gps_coords.count, count);
9798        if (gps_coords.count != count) {
9799            rc = BAD_VALUE;
9800        }
9801    }
9802
9803    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9804        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9805        size_t count = 0;
9806        const char *gps_methods_src = (const char *)
9807                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9808        memset(gps_methods, '\0', sizeof(gps_methods));
9809        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9810        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9811                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9812        if (GPS_PROCESSING_METHOD_SIZE != count) {
9813            rc = BAD_VALUE;
9814        }
9815    }
9816
9817    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9818        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9819        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9820                gps_timestamp)) {
9821            rc = BAD_VALUE;
9822        }
9823    }
9824
9825    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9826        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9827        cam_rotation_info_t rotation_info;
9828        if (orientation == 0) {
9829           rotation_info.rotation = ROTATE_0;
9830        } else if (orientation == 90) {
9831           rotation_info.rotation = ROTATE_90;
9832        } else if (orientation == 180) {
9833           rotation_info.rotation = ROTATE_180;
9834        } else if (orientation == 270) {
9835           rotation_info.rotation = ROTATE_270;
9836        }
9837        rotation_info.device_rotation = ROTATE_0;
9838        rotation_info.streamId = snapshotStreamId;
9839        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9840        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9841            rc = BAD_VALUE;
9842        }
9843    }
9844
9845    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9846        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9847        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9848            rc = BAD_VALUE;
9849        }
9850    }
9851
9852    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9853        uint32_t thumb_quality = (uint32_t)
9854                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9855        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9856                thumb_quality)) {
9857            rc = BAD_VALUE;
9858        }
9859    }
9860
9861    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9862        cam_dimension_t dim;
9863        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9864        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9865        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9866            rc = BAD_VALUE;
9867        }
9868    }
9869
9870    // Internal metadata
9871    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9872        size_t count = 0;
9873        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9874        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9875                privatedata.data.i32, privatedata.count, count);
9876        if (privatedata.count != count) {
9877            rc = BAD_VALUE;
9878        }
9879    }
9880
9881    // EV step
9882    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9883            gCamCapability[mCameraId]->exp_compensation_step)) {
9884        rc = BAD_VALUE;
9885    }
9886
9887    // CDS info
9888    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9889        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9890                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9891
9892        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9893                CAM_INTF_META_CDS_DATA, *cdsData)) {
9894            rc = BAD_VALUE;
9895        }
9896    }
9897
9898    // Hybrid AE
9899    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9900        uint8_t *hybrid_ae = (uint8_t *)
9901                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9902
9903        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9904                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9905            rc = BAD_VALUE;
9906        }
9907    }
9908
9909    return rc;
9910}
9911
9912/*===========================================================================
9913 * FUNCTION   : captureResultCb
9914 *
9915 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9916 *
9917 * PARAMETERS :
9918 *   @frame  : frame information from mm-camera-interface
9919 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9920 *   @userdata: userdata
9921 *
9922 * RETURN     : NONE
9923 *==========================================================================*/
9924void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9925                camera3_stream_buffer_t *buffer,
9926                uint32_t frame_number, bool isInputBuffer, void *userdata)
9927{
9928    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9929    if (hw == NULL) {
9930        LOGE("Invalid hw %p", hw);
9931        return;
9932    }
9933
9934    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9935    return;
9936}
9937
9938
9939/*===========================================================================
9940 * FUNCTION   : initialize
9941 *
9942 * DESCRIPTION: Pass framework callback pointers to HAL
9943 *
9944 * PARAMETERS :
9945 *
9946 *
9947 * RETURN     : Success : 0
9948 *              Failure: -ENODEV
9949 *==========================================================================*/
9950
9951int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9952                                  const camera3_callback_ops_t *callback_ops)
9953{
9954    LOGD("E");
9955    QCamera3HardwareInterface *hw =
9956        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9957    if (!hw) {
9958        LOGE("NULL camera device");
9959        return -ENODEV;
9960    }
9961
9962    int rc = hw->initialize(callback_ops);
9963    LOGD("X");
9964    return rc;
9965}
9966
9967/*===========================================================================
9968 * FUNCTION   : configure_streams
9969 *
9970 * DESCRIPTION:
9971 *
9972 * PARAMETERS :
9973 *
9974 *
9975 * RETURN     : Success: 0
9976 *              Failure: -EINVAL (if stream configuration is invalid)
9977 *                       -ENODEV (fatal error)
9978 *==========================================================================*/
9979
9980int QCamera3HardwareInterface::configure_streams(
9981        const struct camera3_device *device,
9982        camera3_stream_configuration_t *stream_list)
9983{
9984    LOGD("E");
9985    QCamera3HardwareInterface *hw =
9986        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9987    if (!hw) {
9988        LOGE("NULL camera device");
9989        return -ENODEV;
9990    }
9991    int rc = hw->configureStreams(stream_list);
9992    LOGD("X");
9993    return rc;
9994}
9995
9996/*===========================================================================
9997 * FUNCTION   : construct_default_request_settings
9998 *
9999 * DESCRIPTION: Configure a settings buffer to meet the required use case
10000 *
10001 * PARAMETERS :
10002 *
10003 *
10004 * RETURN     : Success: Return valid metadata
10005 *              Failure: Return NULL
10006 *==========================================================================*/
10007const camera_metadata_t* QCamera3HardwareInterface::
10008    construct_default_request_settings(const struct camera3_device *device,
10009                                        int type)
10010{
10011
10012    LOGD("E");
10013    camera_metadata_t* fwk_metadata = NULL;
10014    QCamera3HardwareInterface *hw =
10015        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10016    if (!hw) {
10017        LOGE("NULL camera device");
10018        return NULL;
10019    }
10020
10021    fwk_metadata = hw->translateCapabilityToMetadata(type);
10022
10023    LOGD("X");
10024    return fwk_metadata;
10025}
10026
10027/*===========================================================================
10028 * FUNCTION   : process_capture_request
10029 *
10030 * DESCRIPTION:
10031 *
10032 * PARAMETERS :
10033 *
10034 *
10035 * RETURN     :
10036 *==========================================================================*/
10037int QCamera3HardwareInterface::process_capture_request(
10038                    const struct camera3_device *device,
10039                    camera3_capture_request_t *request)
10040{
10041    LOGD("E");
10042    QCamera3HardwareInterface *hw =
10043        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10044    if (!hw) {
10045        LOGE("NULL camera device");
10046        return -EINVAL;
10047    }
10048
10049    int rc = hw->processCaptureRequest(request);
10050    LOGD("X");
10051    return rc;
10052}
10053
10054/*===========================================================================
10055 * FUNCTION   : dump
10056 *
10057 * DESCRIPTION:
10058 *
10059 * PARAMETERS :
10060 *
10061 *
10062 * RETURN     :
10063 *==========================================================================*/
10064
10065void QCamera3HardwareInterface::dump(
10066                const struct camera3_device *device, int fd)
10067{
10068    /* Log level property is read when "adb shell dumpsys media.camera" is
10069       called so that the log level can be controlled without restarting
10070       the media server */
10071    getLogLevel();
10072
10073    LOGD("E");
10074    QCamera3HardwareInterface *hw =
10075        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10076    if (!hw) {
10077        LOGE("NULL camera device");
10078        return;
10079    }
10080
10081    hw->dump(fd);
10082    LOGD("X");
10083    return;
10084}
10085
10086/*===========================================================================
10087 * FUNCTION   : flush
10088 *
10089 * DESCRIPTION:
10090 *
10091 * PARAMETERS :
10092 *
10093 *
10094 * RETURN     :
10095 *==========================================================================*/
10096
10097int QCamera3HardwareInterface::flush(
10098                const struct camera3_device *device)
10099{
10100    int rc;
10101    LOGD("E");
10102    QCamera3HardwareInterface *hw =
10103        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10104    if (!hw) {
10105        LOGE("NULL camera device");
10106        return -EINVAL;
10107    }
10108
10109    pthread_mutex_lock(&hw->mMutex);
10110    // Validate current state
10111    switch (hw->mState) {
10112        case STARTED:
10113            /* valid state */
10114            break;
10115
10116        case ERROR:
10117            pthread_mutex_unlock(&hw->mMutex);
10118            hw->handleCameraDeviceError();
10119            return -ENODEV;
10120
10121        default:
10122            LOGI("Flush returned during state %d", hw->mState);
10123            pthread_mutex_unlock(&hw->mMutex);
10124            return 0;
10125    }
10126    pthread_mutex_unlock(&hw->mMutex);
10127
10128    rc = hw->flush(true /* restart channels */ );
10129    LOGD("X");
10130    return rc;
10131}
10132
10133/*===========================================================================
10134 * FUNCTION   : close_camera_device
10135 *
10136 * DESCRIPTION:
10137 *
10138 * PARAMETERS :
10139 *
10140 *
10141 * RETURN     :
10142 *==========================================================================*/
10143int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10144{
10145    int ret = NO_ERROR;
10146    QCamera3HardwareInterface *hw =
10147        reinterpret_cast<QCamera3HardwareInterface *>(
10148            reinterpret_cast<camera3_device_t *>(device)->priv);
10149    if (!hw) {
10150        LOGE("NULL camera device");
10151        return BAD_VALUE;
10152    }
10153
10154    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10155    delete hw;
10156    LOGI("[KPI Perf]: X");
10157    return ret;
10158}
10159
10160/*===========================================================================
10161 * FUNCTION   : getWaveletDenoiseProcessPlate
10162 *
10163 * DESCRIPTION: query wavelet denoise process plate
10164 *
10165 * PARAMETERS : None
10166 *
10167 * RETURN     : WNR prcocess plate value
10168 *==========================================================================*/
10169cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10170{
10171    char prop[PROPERTY_VALUE_MAX];
10172    memset(prop, 0, sizeof(prop));
10173    property_get("persist.denoise.process.plates", prop, "0");
10174    int processPlate = atoi(prop);
10175    switch(processPlate) {
10176    case 0:
10177        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10178    case 1:
10179        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10180    case 2:
10181        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10182    case 3:
10183        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10184    default:
10185        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10186    }
10187}
10188
10189
10190/*===========================================================================
10191 * FUNCTION   : getTemporalDenoiseProcessPlate
10192 *
10193 * DESCRIPTION: query temporal denoise process plate
10194 *
10195 * PARAMETERS : None
10196 *
10197 * RETURN     : TNR prcocess plate value
10198 *==========================================================================*/
10199cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10200{
10201    char prop[PROPERTY_VALUE_MAX];
10202    memset(prop, 0, sizeof(prop));
10203    property_get("persist.tnr.process.plates", prop, "0");
10204    int processPlate = atoi(prop);
10205    switch(processPlate) {
10206    case 0:
10207        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10208    case 1:
10209        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10210    case 2:
10211        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10212    case 3:
10213        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10214    default:
10215        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10216    }
10217}
10218
10219
10220/*===========================================================================
10221 * FUNCTION   : extractSceneMode
10222 *
10223 * DESCRIPTION: Extract scene mode from frameworks set metadata
10224 *
10225 * PARAMETERS :
10226 *      @frame_settings: CameraMetadata reference
10227 *      @metaMode: ANDROID_CONTORL_MODE
10228 *      @hal_metadata: hal metadata structure
10229 *
10230 * RETURN     : None
10231 *==========================================================================*/
10232int32_t QCamera3HardwareInterface::extractSceneMode(
10233        const CameraMetadata &frame_settings, uint8_t metaMode,
10234        metadata_buffer_t *hal_metadata)
10235{
10236    int32_t rc = NO_ERROR;
10237
10238    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10239        camera_metadata_ro_entry entry =
10240                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10241        if (0 == entry.count)
10242            return rc;
10243
10244        uint8_t fwk_sceneMode = entry.data.u8[0];
10245
10246        int val = lookupHalName(SCENE_MODES_MAP,
10247                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10248                fwk_sceneMode);
10249        if (NAME_NOT_FOUND != val) {
10250            uint8_t sceneMode = (uint8_t)val;
10251            LOGD("sceneMode: %d", sceneMode);
10252            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10253                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10254                rc = BAD_VALUE;
10255            }
10256        }
10257    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10258            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10259        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10260        LOGD("sceneMode: %d", sceneMode);
10261        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10262                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10263            rc = BAD_VALUE;
10264        }
10265    }
10266    return rc;
10267}
10268
10269/*===========================================================================
10270 * FUNCTION   : needRotationReprocess
10271 *
10272 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10273 *
10274 * PARAMETERS : none
10275 *
10276 * RETURN     : true: needed
10277 *              false: no need
10278 *==========================================================================*/
10279bool QCamera3HardwareInterface::needRotationReprocess()
10280{
10281    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10282        // current rotation is not zero, and pp has the capability to process rotation
10283        LOGH("need do reprocess for rotation");
10284        return true;
10285    }
10286
10287    return false;
10288}
10289
10290/*===========================================================================
10291 * FUNCTION   : needReprocess
10292 *
10293 * DESCRIPTION: if reprocess in needed
10294 *
10295 * PARAMETERS : none
10296 *
10297 * RETURN     : true: needed
10298 *              false: no need
10299 *==========================================================================*/
10300bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10301{
10302    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10303        // TODO: add for ZSL HDR later
10304        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10305        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10306            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10307            return true;
10308        } else {
10309            LOGH("already post processed frame");
10310            return false;
10311        }
10312    }
10313    return needRotationReprocess();
10314}
10315
10316/*===========================================================================
10317 * FUNCTION   : needJpegExifRotation
10318 *
10319 * DESCRIPTION: if rotation from jpeg is needed
10320 *
10321 * PARAMETERS : none
10322 *
10323 * RETURN     : true: needed
10324 *              false: no need
10325 *==========================================================================*/
10326bool QCamera3HardwareInterface::needJpegExifRotation()
10327{
10328   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10329    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10330       LOGD("Need use Jpeg EXIF Rotation");
10331       return true;
10332    }
10333    return false;
10334}
10335
10336/*===========================================================================
10337 * FUNCTION   : addOfflineReprocChannel
10338 *
10339 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10340 *              coming from input channel
10341 *
10342 * PARAMETERS :
10343 *   @config  : reprocess configuration
10344 *   @inputChHandle : pointer to the input (source) channel
10345 *
10346 *
10347 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10348 *==========================================================================*/
10349QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
10350        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
10351{
10352    int32_t rc = NO_ERROR;
10353    QCamera3ReprocessChannel *pChannel = NULL;
10354
10355    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
10356            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
10357            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
10358    if (NULL == pChannel) {
10359        LOGE("no mem for reprocess channel");
10360        return NULL;
10361    }
10362
10363    rc = pChannel->initialize(IS_TYPE_NONE);
10364    if (rc != NO_ERROR) {
10365        LOGE("init reprocess channel failed, ret = %d", rc);
10366        delete pChannel;
10367        return NULL;
10368    }
10369
10370    // pp feature config
10371    cam_pp_feature_config_t pp_config;
10372    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
10373
10374    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
10375    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
10376            & CAM_QCOM_FEATURE_DSDN) {
10377        //Use CPP CDS incase h/w supports it.
10378        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
10379        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
10380    }
10381    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10382        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
10383    }
10384
10385    rc = pChannel->addReprocStreamsFromSource(pp_config,
10386            config,
10387            IS_TYPE_NONE,
10388            mMetadataChannel);
10389
10390    if (rc != NO_ERROR) {
10391        delete pChannel;
10392        return NULL;
10393    }
10394    return pChannel;
10395}
10396
10397/*===========================================================================
10398 * FUNCTION   : getMobicatMask
10399 *
10400 * DESCRIPTION: returns mobicat mask
10401 *
10402 * PARAMETERS : none
10403 *
10404 * RETURN     : mobicat mask
10405 *
10406 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Cached value written by setMobicat() from persist.camera.mobicat.
    return m_MobicatMask;
}
10411
10412/*===========================================================================
10413 * FUNCTION   : setMobicat
10414 *
10415 * DESCRIPTION: set Mobicat on/off.
10416 *
10417 * PARAMETERS :
10418 *   @params  : none
10419 *
10420 * RETURN     : int32_t type of status
10421 *              NO_ERROR  -- success
10422 *              none-zero failure code
10423 *==========================================================================*/
10424int32_t QCamera3HardwareInterface::setMobicat()
10425{
10426    char value [PROPERTY_VALUE_MAX];
10427    property_get("persist.camera.mobicat", value, "0");
10428    int32_t ret = NO_ERROR;
10429    uint8_t enableMobi = (uint8_t)atoi(value);
10430
10431    if (enableMobi) {
10432        tune_cmd_t tune_cmd;
10433        tune_cmd.type = SET_RELOAD_CHROMATIX;
10434        tune_cmd.module = MODULE_ALL;
10435        tune_cmd.value = TRUE;
10436        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10437                CAM_INTF_PARM_SET_VFE_COMMAND,
10438                tune_cmd);
10439
10440        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10441                CAM_INTF_PARM_SET_PP_COMMAND,
10442                tune_cmd);
10443    }
10444    m_MobicatMask = enableMobi;
10445
10446    return ret;
10447}
10448
10449/*===========================================================================
10450* FUNCTION   : getLogLevel
10451*
10452* DESCRIPTION: Reads the log level property into a variable
10453*
10454* PARAMETERS :
10455*   None
10456*
10457* RETURN     :
10458*   None
10459*==========================================================================*/
10460void QCamera3HardwareInterface::getLogLevel()
10461{
10462    char prop[PROPERTY_VALUE_MAX];
10463    uint32_t globalLogLevel = 0;
10464
10465    property_get("persist.camera.hal.debug", prop, "0");
10466    int val = atoi(prop);
10467    if (0 <= val) {
10468        gCamHal3LogLevel = (uint32_t)val;
10469    }
10470
10471    property_get("persist.camera.kpi.debug", prop, "1");
10472    gKpiDebugLevel = atoi(prop);
10473
10474    property_get("persist.camera.global.debug", prop, "0");
10475    val = atoi(prop);
10476    if (0 <= val) {
10477        globalLogLevel = (uint32_t)val;
10478    }
10479
10480    /* Highest log level among hal.logs and global.logs is selected */
10481    if (gCamHal3LogLevel < globalLogLevel)
10482        gCamHal3LogLevel = globalLogLevel;
10483
10484    return;
10485}
10486
10487/*===========================================================================
10488 * FUNCTION   : validateStreamRotations
10489 *
10490 * DESCRIPTION: Check if the rotations requested are supported
10491 *
10492 * PARAMETERS :
10493 *   @stream_list : streams to be configured
10494 *
10495 * RETURN     : NO_ERROR on success
10496 *              -EINVAL on failure
10497 *
10498 *==========================================================================*/
10499int QCamera3HardwareInterface::validateStreamRotations(
10500        camera3_stream_configuration_t *streamList)
10501{
10502    int rc = NO_ERROR;
10503
10504    /*
10505    * Loop through all streams requested in configuration
10506    * Check if unsupported rotations have been requested on any of them
10507    */
10508    for (size_t j = 0; j < streamList->num_streams; j++){
10509        camera3_stream_t *newStream = streamList->streams[j];
10510
10511        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10512        bool isImplDef = (newStream->format ==
10513                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10514        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10515                isImplDef);
10516
10517        if (isRotated && (!isImplDef || isZsl)) {
10518            LOGE("Error: Unsupported rotation of %d requested for stream"
10519                    "type:%d and stream format:%d",
10520                    newStream->rotation, newStream->stream_type,
10521                    newStream->format);
10522            rc = -EINVAL;
10523            break;
10524        }
10525    }
10526
10527    return rc;
10528}
10529
10530/*===========================================================================
10531* FUNCTION   : getFlashInfo
10532*
10533* DESCRIPTION: Retrieve information about whether the device has a flash.
10534*
10535* PARAMETERS :
10536*   @cameraId  : Camera id to query
10537*   @hasFlash  : Boolean indicating whether there is a flash device
10538*                associated with given camera
10539*   @flashNode : If a flash device exists, this will be its device node.
10540*
10541* RETURN     :
10542*   None
10543*==========================================================================*/
10544void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10545        bool& hasFlash,
10546        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10547{
10548    cam_capability_t* camCapability = gCamCapability[cameraId];
10549    if (NULL == camCapability) {
10550        hasFlash = false;
10551        flashNode[0] = '\0';
10552    } else {
10553        hasFlash = camCapability->flash_available;
10554        strlcpy(flashNode,
10555                (char*)camCapability->flash_dev_name,
10556                QCAMERA_MAX_FILEPATH_LENGTH);
10557    }
10558}
10559
10560/*===========================================================================
10561* FUNCTION   : getEepromVersionInfo
10562*
10563* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10564*
10565* PARAMETERS : None
10566*
10567* RETURN     : string describing EEPROM version
10568*              "\0" if no such info available
10569*==========================================================================*/
10570const char *QCamera3HardwareInterface::getEepromVersionInfo()
10571{
10572    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
10573}
10574
10575/*===========================================================================
10576* FUNCTION   : getLdafCalib
10577*
10578* DESCRIPTION: Retrieve Laser AF calibration data
10579*
10580* PARAMETERS : None
10581*
10582* RETURN     : Two uint32_t describing laser AF calibration data
10583*              NULL if none is available.
10584*==========================================================================*/
10585const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10586{
10587    if (mLdafCalibExist) {
10588        return &mLdafCalib[0];
10589    } else {
10590        return NULL;
10591    }
10592}
10593
/*===========================================================================
 * FUNCTION   : dynamicUpdateMetaStreamInfo
 *
 * DESCRIPTION: This function:
 *             (1) stops all the channels
 *             (2) returns error on pending requests and buffers
 *             (3) sends metastream_info in setparams
 *             (4) starts all channels
 *             This is useful when sensor has to be restarted to apply any
 *             settings such as frame rate from a different sensor mode
 *
 * PARAMETERS : None
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Step (1): stream-off every channel before touching stream info.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step (2): flush in-flight work back to the framework as errors, since
    // the restart invalidates anything still pending.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the stream configuration about to be re-sent.
    // NOTE(review): the two format literals concatenate with no separating
    // space ("...pp_mask: 0x%xFormat:%d"), and %x may not match the width of
    // postprocess_mask (printed with %llx elsewhere in this file) — confirm.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    // Step (3): push CAM_INTF_META_STREAM_INFO down via set_parms so the
    // backend re-evaluates the sensor mode. Failure here is logged but not
    // fatal — the channels are still restarted below.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step (4): stream-on everything again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
10658
10659/*===========================================================================
10660 * FUNCTION   : stopAllChannels
10661 *
10662 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10663 *
10664 * PARAMETERS : None
10665 *
10666 * RETURN     : NO_ERROR on success
10667 *              Error codes on failure
10668 *
10669 *==========================================================================*/
10670int32_t QCamera3HardwareInterface::stopAllChannels()
10671{
10672    int32_t rc = NO_ERROR;
10673
10674    LOGD("Stopping all channels");
10675    // Stop the Streams/Channels
10676    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10677        it != mStreamInfo.end(); it++) {
10678        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10679        if (channel) {
10680            channel->stop();
10681        }
10682        (*it)->status = INVALID;
10683    }
10684
10685    if (mSupportChannel) {
10686        mSupportChannel->stop();
10687    }
10688    if (mAnalysisChannel) {
10689        mAnalysisChannel->stop();
10690    }
10691    if (mRawDumpChannel) {
10692        mRawDumpChannel->stop();
10693    }
10694    if (mMetadataChannel) {
10695        /* If content of mStreamInfo is not 0, there is metadata stream */
10696        mMetadataChannel->stop();
10697    }
10698
10699    LOGD("All channels stopped");
10700    return rc;
10701}
10702
10703/*===========================================================================
10704 * FUNCTION   : startAllChannels
10705 *
10706 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10707 *
10708 * PARAMETERS : None
10709 *
10710 * RETURN     : NO_ERROR on success
10711 *              Error codes on failure
10712 *
10713 *==========================================================================*/
10714int32_t QCamera3HardwareInterface::startAllChannels()
10715{
10716    int32_t rc = NO_ERROR;
10717
10718    LOGD("Start all channels ");
10719    // Start the Streams/Channels
10720    if (mMetadataChannel) {
10721        /* If content of mStreamInfo is not 0, there is metadata stream */
10722        rc = mMetadataChannel->start();
10723        if (rc < 0) {
10724            LOGE("META channel start failed");
10725            return rc;
10726        }
10727    }
10728    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10729        it != mStreamInfo.end(); it++) {
10730        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10731        if (channel) {
10732            rc = channel->start();
10733            if (rc < 0) {
10734                LOGE("channel start failed");
10735                return rc;
10736            }
10737        }
10738    }
10739    if (mAnalysisChannel) {
10740        mAnalysisChannel->start();
10741    }
10742    if (mSupportChannel) {
10743        rc = mSupportChannel->start();
10744        if (rc < 0) {
10745            LOGE("Support channel start failed");
10746            return rc;
10747        }
10748    }
10749    if (mRawDumpChannel) {
10750        rc = mRawDumpChannel->start();
10751        if (rc < 0) {
10752            LOGE("RAW dump channel start failed");
10753            return rc;
10754        }
10755    }
10756
10757    LOGD("All channels started");
10758    return rc;
10759}
10760
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Oldest pending request's frame number partitions the pending buffers:
    // buffers older than it already had their metadata delivered (ERROR_BUFFER
    // path), the rest still owe the framework a full result (ERROR_REQUEST).
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    // Walk every request that still holds buffers; each branch erases the
    // request, so the iterator is advanced by erase(), not by the loop.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): operator new[] throws on failure, so this NULL
            // check is effectively dead unless built with -fno-exceptions.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            // One capture result carrying every buffer of this request,
            // each flagged CAMERA3_BUFFER_STATUS_ERROR; no metadata attached.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // Per-buffer ERROR_BUFFER notify precedes the batched
                // process_capture_result below.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            // NOTE(review): `i` is dereferenced below for input_buffer; this
            // branch assumes mPendingRequestsList is non-empty (frameNum was
            // taken from it) — confirm the frameNum == UINT_MAX case cannot
            // reach here.
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            // Single ERROR_REQUEST notify (no per-buffer notifies here).
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            // NOTE(review): dead NULL check after new[], same as above.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            // Also retire the oldest pending request entry itself.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
10910
10911bool QCamera3HardwareInterface::isOnEncoder(
10912        const cam_dimension_t max_viewfinder_size,
10913        uint32_t width, uint32_t height)
10914{
10915    return (width > (uint32_t)max_viewfinder_size.width ||
10916            height > (uint32_t)max_viewfinder_size.height);
10917}
10918
10919/*===========================================================================
10920 * FUNCTION   : setBundleInfo
10921 *
10922 * DESCRIPTION: Set bundle info for all streams that are bundle.
10923 *
10924 * PARAMETERS : None
10925 *
10926 * RETURN     : NO_ERROR on success
10927 *              Error codes on failure
10928 *==========================================================================*/
10929int32_t QCamera3HardwareInterface::setBundleInfo()
10930{
10931    int32_t rc = NO_ERROR;
10932
10933    if (mChannelHandle) {
10934        cam_bundle_config_t bundleInfo;
10935        memset(&bundleInfo, 0, sizeof(bundleInfo));
10936        rc = mCameraHandle->ops->get_bundle_info(
10937                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10938        if (rc != NO_ERROR) {
10939            LOGE("get_bundle_info failed");
10940            return rc;
10941        }
10942        if (mAnalysisChannel) {
10943            mAnalysisChannel->setBundleInfo(bundleInfo);
10944        }
10945        if (mSupportChannel) {
10946            mSupportChannel->setBundleInfo(bundleInfo);
10947        }
10948        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10949                it != mStreamInfo.end(); it++) {
10950            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10951            channel->setBundleInfo(bundleInfo);
10952        }
10953        if (mRawDumpChannel) {
10954            mRawDumpChannel->setBundleInfo(bundleInfo);
10955        }
10956    }
10957
10958    return rc;
10959}
10960
10961/*===========================================================================
10962 * FUNCTION   : get_num_overall_buffers
10963 *
10964 * DESCRIPTION: Estimate number of pending buffers across all requests.
10965 *
10966 * PARAMETERS : None
10967 *
10968 * RETURN     : Number of overall pending buffers
10969 *
10970 *==========================================================================*/
10971uint32_t PendingBuffersMap::get_num_overall_buffers()
10972{
10973    uint32_t sum_buffers = 0;
10974    for (auto &req : mPendingBuffersInRequest) {
10975        sum_buffers += req.mPendingBufferList.size();
10976    }
10977    return sum_buffers;
10978}
10979
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Scan every pending request's buffer list for the matching handle.
    // Only the first match is removed; both loops break immediately after
    // the erase, so the erase-returned iterators are never advanced past.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                // A request with no remaining buffers is fully satisfied;
                // drop its entry from the map as well.
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
11016
11017/*===========================================================================
11018 * FUNCTION   : setPAAFSupport
11019 *
11020 * DESCRIPTION: Set the preview-assisted auto focus support bit in
11021 *              feature mask according to stream type and filter
11022 *              arrangement
11023 *
11024 * PARAMETERS : @feature_mask: current feature mask, which may be modified
11025 *              @stream_type: stream type
11026 *              @filter_arrangement: filter arrangement
11027 *
11028 * RETURN     : None
11029 *==========================================================================*/
11030void QCamera3HardwareInterface::setPAAFSupport(
11031        cam_feature_mask_t& feature_mask,
11032        cam_stream_type_t stream_type,
11033        cam_color_filter_arrangement_t filter_arrangement)
11034{
11035    switch (filter_arrangement) {
11036    case CAM_FILTER_ARRANGEMENT_RGGB:
11037    case CAM_FILTER_ARRANGEMENT_GRBG:
11038    case CAM_FILTER_ARRANGEMENT_GBRG:
11039    case CAM_FILTER_ARRANGEMENT_BGGR:
11040        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11041                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11042            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11043        }
11044        break;
11045    case CAM_FILTER_ARRANGEMENT_Y:
11046        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11047            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11048        }
11049        break;
11050    default:
11051        break;
11052    }
11053    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11054            feature_mask, stream_type, filter_arrangement);
11055
11056
11057}
11058
11059/*===========================================================================
11060 * FUNCTION   : adjustBlackLevelForCFA
11061 *
11062 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11063 *              of bayer CFA (Color Filter Array).
11064 *
11065 * PARAMETERS : @input: black level pattern in the order of RGGB
11066 *              @output: black level pattern in the order of CFA
11067 *              @color_arrangement: CFA color arrangement
11068 *
11069 * RETURN     : None
11070 *==========================================================================*/
11071template<typename T>
11072void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11073        T input[BLACK_LEVEL_PATTERN_CNT],
11074        T output[BLACK_LEVEL_PATTERN_CNT],
11075        cam_color_filter_arrangement_t color_arrangement)
11076{
11077    switch (color_arrangement) {
11078    case CAM_FILTER_ARRANGEMENT_GRBG:
11079        output[0] = input[1];
11080        output[1] = input[0];
11081        output[2] = input[3];
11082        output[3] = input[2];
11083        break;
11084    case CAM_FILTER_ARRANGEMENT_GBRG:
11085        output[0] = input[2];
11086        output[1] = input[3];
11087        output[2] = input[0];
11088        output[3] = input[1];
11089        break;
11090    case CAM_FILTER_ARRANGEMENT_BGGR:
11091        output[0] = input[3];
11092        output[1] = input[2];
11093        output[2] = input[1];
11094        output[3] = input[0];
11095        break;
11096    case CAM_FILTER_ARRANGEMENT_RGGB:
11097        output[0] = input[0];
11098        output[1] = input[1];
11099        output[2] = input[2];
11100        output[3] = input[3];
11101        break;
11102    default:
11103        LOGE("Invalid color arrangement to derive dynamic blacklevel");
11104        break;
11105    }
11106}
11107}; //end namespace qcamera
11108