QCamera3HWI.cpp revision aa06a7751917bd694f475dc8110c8150c4e4ad72
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
64#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
65
66#define EMPTY_PIPELINE_DELAY 2
67#define PARTIAL_RESULT_COUNT 2
68#define FRAME_SKIP_DELAY     0
69
70#define MAX_VALUE_8BIT ((1<<8)-1)
71#define MAX_VALUE_10BIT ((1<<10)-1)
72#define MAX_VALUE_12BIT ((1<<12)-1)
73
74#define VIDEO_4K_WIDTH  3840
75#define VIDEO_4K_HEIGHT 2160
76
77#define MAX_EIS_WIDTH 3840
78#define MAX_EIS_HEIGHT 2160
79
80#define MAX_RAW_STREAMS        1
81#define MAX_STALLING_STREAMS   1
82#define MAX_PROCESSED_STREAMS  3
83/* Batch mode is enabled only if FPS set is equal to or greater than this */
84#define MIN_FPS_FOR_BATCH_MODE (120)
85#define PREVIEW_FPS_FOR_HFR    (30)
86#define DEFAULT_VIDEO_FPS      (30.0)
87#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
88#define MAX_HFR_BATCH_SIZE     (8)
89#define REGIONS_TUPLE_COUNT    5
90#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
91#define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds
92// Set a threshold for detection of missing buffers //seconds
93#define MISSING_REQUEST_BUF_TIMEOUT 3
94#define FLUSH_TIMEOUT 3
95#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
96
97#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
98                                              CAM_QCOM_FEATURE_CROP |\
99                                              CAM_QCOM_FEATURE_ROTATION |\
100                                              CAM_QCOM_FEATURE_SHARPNESS |\
101                                              CAM_QCOM_FEATURE_SCALE |\
102                                              CAM_QCOM_FEATURE_CAC |\
103                                              CAM_QCOM_FEATURE_CDS )
104/* Per configuration size for static metadata length*/
105#define PER_CONFIGURATION_SIZE_3 (3)
106
107#define TIMEOUT_NEVER -1
108
109cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
110const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
111extern pthread_mutex_t gCamLock;
112volatile uint32_t gCamHal3LogLevel = 1;
113extern uint8_t gNumCameraSessions;
114
// Maps CDS configuration strings ("On"/"Off"/"Auto") to HAL CDS modes
// (presumably fed from a persist.camera setprop — confirm at the call site).
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
120
// Translation table: framework ANDROID_CONTROL_EFFECT_MODE_* <-> HAL effect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
134
// Translation table: framework ANDROID_CONTROL_AWB_MODE_* <-> HAL white-balance modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
148
// Translation table: framework ANDROID_CONTROL_SCENE_MODE_* <-> HAL scene modes.
// Note: STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
169
// Translation table: framework ANDROID_CONTROL_AF_MODE_* <-> HAL focus modes.
// ANDROID_CONTROL_AF_MODE_OFF appears twice on purpose: both CAM_FOCUS_MODE_OFF
// and CAM_FOCUS_MODE_FIXED are reported to the framework as AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
181
// Translation table: framework color-correction aberration (CAC) modes <-> HAL modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
192
// Translation table: framework AE antibanding modes <-> HAL antibanding modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
201
// Maps framework AE modes to the flash behavior they imply on the HAL side.
// Both AE_MODE_OFF and AE_MODE_ON (no auto-flash variants) keep the flash off;
// the REDEYE variant is treated the same as plain auto-flash here.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
211
// Translation table: framework ANDROID_FLASH_MODE_* <-> HAL flash modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
219
// Translation table: framework face-detect modes <-> HAL face-detect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
227
// Translation table: framework focus-distance calibration levels <-> HAL levels.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
238
// Translation table: framework lens-state enum <-> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
245
// Supported JPEG thumbnail sizes as flattened (width, height) pairs.
// The leading {0, 0} entry advertises "no thumbnail" per the Camera2 metadata
// convention for ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
254
// Translation table: framework sensor test-pattern modes <-> HAL test patterns.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
265
266/* Since there is no mapping for all the options some Android enum are not listed.
267 * Also, the order in this list is important because while mapping from HAL to Android it will
268 * traverse from lower to higher index which means that for HAL values that are map to different
269 * Android values, the traverse logic will select the first one found.
270 */
// Translation table: framework reference-illuminant enums <-> HAL AWB illuminants.
// Several HAL values repeat (e.g. CAM_AWB_D50, CAM_AWB_A); per the note above,
// HAL->Android lookups take the first match, so row order is significant.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
291
// Maps a requested frame rate (fps) to the corresponding HAL HFR mode enum.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
303
// camera3_device_ops vtable handed to the camera framework.
// register_stream_buffers and get_metadata_vendor_tag_ops are left NULL
// (deprecated entry points in later camera3 HAL versions).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
315
// Per-camera session IDs used for dual-camera linking. Initialised to a
// 0xDEADBEEF sentinel; the real ID is filled in by get_session_id() during
// openCamera().
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
318
319/*===========================================================================
320 * FUNCTION   : QCamera3HardwareInterface
321 *
322 * DESCRIPTION: constructor of QCamera3HardwareInterface
323 *
324 * PARAMETERS :
325 *   @cameraId  : camera ID
326 *
327 * RETURN     : none
328 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the hw_device_t header the framework uses to talk to this HAL.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; created lazily on demand.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview and video paths.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the Adreno GPU's pixel alignment so buffer strides match what the
    // GPU expects; fall back to CAM_PAD_TO_32 if the library is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
440
441/*===========================================================================
442 * FUNCTION   : ~QCamera3HardwareInterface
443 *
444 * DESCRIPTION: destructor of QCamera3HardwareInterface
445 *
446 * PARAMETERS : none
447 *
448 * RETURN     : none
449 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    // Phase 1: stop every channel (raw dump, per-stream, support, analysis,
    // metadata) and then the backend channel itself, before any deletion.
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Phase 2: delete all channel objects now that everything is stopped.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo above; just clear the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // Tell the backend there are no active streams anymore before
            // tearing down the parameter heap.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Phase 3: drain all pending bookkeeping (buffers, reprocess results,
    // requests) and release cached default request templates.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
572
573/*===========================================================================
574 * FUNCTION   : erasePendingRequest
575 *
576 * DESCRIPTION: function to erase a desired pending request after freeing any
577 *              allocated memory
578 *
579 * PARAMETERS :
580 *   @i       : iterator pointing to pending request to be erased
581 *
582 * RETURN     : iterator pointing to the next request
583 *==========================================================================*/
584QCamera3HardwareInterface::pendingRequestIterator
585        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
586{
587    if (i->input_buffer != NULL) {
588        free(i->input_buffer);
589        i->input_buffer = NULL;
590    }
591    if (i->settings != NULL)
592        free_camera_metadata((camera_metadata_t*)i->settings);
593    return mPendingRequestsList.erase(i);
594}
595
596/*===========================================================================
597 * FUNCTION   : camEvtHandle
598 *
599 * DESCRIPTION: Function registered to mm-camera-interface to handle events
600 *
601 * PARAMETERS :
602 *   @camera_handle : interface layer camera handle
603 *   @evt           : ptr to event
604 *   @user_data     : user data ptr
605 *
606 * RETURN     : none
607 *==========================================================================*/
608void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
609                                          mm_camera_event_t *evt,
610                                          void *user_data)
611{
612    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
613    if (obj && evt) {
614        switch(evt->server_event_type) {
615            case CAM_EVENT_TYPE_DAEMON_DIED:
616                pthread_mutex_lock(&obj->mMutex);
617                obj->mState = ERROR;
618                pthread_mutex_unlock(&obj->mMutex);
619                LOGE("Fatal, camera daemon died");
620                break;
621
622            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
623                LOGD("HAL got request pull from Daemon");
624                pthread_mutex_lock(&obj->mMutex);
625                obj->mWokenUpByDaemon = true;
626                obj->unblockRequestIfNecessary();
627                pthread_mutex_unlock(&obj->mMutex);
628                break;
629
630            default:
631                LOGW("Warning: Unhandled event %d",
632                        evt->server_event_type);
633                break;
634        }
635    } else {
636        LOGE("NULL user_data/evt");
637    }
638}
639
640/*===========================================================================
641 * FUNCTION   : openCamera
642 *
643 * DESCRIPTION: open camera
644 *
645 * PARAMETERS :
646 *   @hw_device  : double ptr for camera device struct
647 *
648 * RETURN     : int32_t type of status
649 *              NO_ERROR  -- success
650 *              none-zero failure code
651 *==========================================================================*/
652int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
653{
654    int rc = 0;
655    if (mState != CLOSED) {
656        *hw_device = NULL;
657        return PERMISSION_DENIED;
658    }
659
660    m_perfLock.lock_acq();
661    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
662             mCameraId);
663
664    rc = openCamera();
665    if (rc == 0) {
666        *hw_device = &mCameraDevice.common;
667    } else
668        *hw_device = NULL;
669
670    m_perfLock.lock_rel();
671    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
672             mCameraId, rc);
673
674    if (rc == NO_ERROR) {
675        mState = OPENED;
676    }
677    return rc;
678}
679
680/*===========================================================================
681 * FUNCTION   : openCamera
682 *
683 * DESCRIPTION: open camera
684 *
685 * PARAMETERS : none
686 *
687 * RETURN     : int32_t type of status
688 *              NO_ERROR  -- success
689 *              none-zero failure code
690 *==========================================================================*/
691int QCamera3HardwareInterface::openCamera()
692{
693    int rc = 0;
694    char value[PROPERTY_VALUE_MAX];
695
696    KPI_ATRACE_CALL();
697    if (mCameraHandle) {
698        LOGE("Failure: Camera already opened");
699        return ALREADY_EXISTS;
700    }
701
702    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
703    if (rc < 0) {
704        LOGE("Failed to reserve flash for camera id: %d",
705                mCameraId);
706        return UNKNOWN_ERROR;
707    }
708
709    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
710    if (rc) {
711        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
712        return rc;
713    }
714
715    if (!mCameraHandle) {
716        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
717        return -ENODEV;
718    }
719
720    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
721            camEvtHandle, (void *)this);
722
723    if (rc < 0) {
724        LOGE("Error, failed to register event callback");
725        /* Not closing camera here since it is already handled in destructor */
726        return FAILED_TRANSACTION;
727    }
728
729    mExifParams.debug_params =
730            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
731    if (mExifParams.debug_params) {
732        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
733    } else {
734        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
735        return NO_MEMORY;
736    }
737    mFirstConfiguration = true;
738
739    //Notify display HAL that a camera session is active.
740    //But avoid calling the same during bootup because camera service might open/close
741    //cameras at boot time during its initialization and display service will also internally
742    //wait for camera service to initialize first while calling this display API, resulting in a
743    //deadlock situation. Since boot time camera open/close calls are made only to fetch
744    //capabilities, no need of this display bw optimization.
745    //Use "service.bootanim.exit" property to know boot status.
746    property_get("service.bootanim.exit", value, "0");
747    if (atoi(value) == 1) {
748        pthread_mutex_lock(&gCamLock);
749        if (gNumCameraSessions++ == 0) {
750            setCameraLaunchStatus(true);
751        }
752        pthread_mutex_unlock(&gCamLock);
753    }
754
755    //fill the session id needed while linking dual cam
756    pthread_mutex_lock(&gCamLock);
757    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
758        &sessionId[mCameraId]);
759    pthread_mutex_unlock(&gCamLock);
760
761    if (rc < 0) {
762        LOGE("Error, failed to get sessiion id");
763        return UNKNOWN_ERROR;
764    } else {
765        //Allocate related cam sync buffer
766        //this is needed for the payload that goes along with bundling cmd for related
767        //camera use cases
768        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
769        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
770        if(rc != OK) {
771            rc = NO_MEMORY;
772            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
773            return NO_MEMORY;
774        }
775
776        //Map memory for related cam sync buffer
777        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
778                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
779                m_pRelCamSyncHeap->getFd(0),
780                sizeof(cam_sync_related_sensors_event_info_t));
781        if(rc < 0) {
782            LOGE("Dualcam: failed to map Related cam sync buffer");
783            rc = FAILED_TRANSACTION;
784            return NO_MEMORY;
785        }
786        m_pRelCamSyncBuf =
787                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
788    }
789
790    LOGH("mCameraId=%d",mCameraId);
791
792    return NO_ERROR;
793}
794
795/*===========================================================================
796 * FUNCTION   : closeCamera
797 *
798 * DESCRIPTION: close camera
799 *
800 * PARAMETERS : none
801 *
802 * RETURN     : int32_t type of status
803 *              NO_ERROR  -- success
 *              non-zero failure code
805 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CALL();
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);
    // Close the backend camera session first, then drop the handle.
    // NOTE(review): mCameraHandle is dereferenced unconditionally here —
    // assumes callers only invoke closeCamera() after a successful
    // openCamera(); confirm against the state machine in the caller.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Only the last session to close clears the launch status.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Tear down the dual-camera related-sensor sync buffer allocated in
    // openCamera(); both the heap and the derived data pointer are cleared.
    if (NULL != m_pRelCamSyncHeap) {
        m_pRelCamSyncHeap->deallocate();
        delete m_pRelCamSyncHeap;
        m_pRelCamSyncHeap = NULL;
        m_pRelCamSyncBuf = NULL;
    }

    // Release the 3A debug exif parameter buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Best-effort flash release: failure is logged but does not fail close.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
854
855/*===========================================================================
856 * FUNCTION   : initialize
857 *
858 * DESCRIPTION: Initialize frameworks callback functions
859 *
860 * PARAMETERS :
861 *   @callback_ops : callback function to frameworks
862 *
863 * RETURN     :
864 *
865 *==========================================================================*/
866int QCamera3HardwareInterface::initialize(
867        const struct camera3_callback_ops *callback_ops)
868{
869    ATRACE_CALL();
870    int rc;
871
872    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
873    pthread_mutex_lock(&mMutex);
874
875    // Validate current state
876    switch (mState) {
877        case OPENED:
878            /* valid state */
879            break;
880        default:
881            LOGE("Invalid state %d", mState);
882            rc = -ENODEV;
883            goto err1;
884    }
885
886    rc = initParameters();
887    if (rc < 0) {
888        LOGE("initParamters failed %d", rc);
889        goto err1;
890    }
891    mCallbackOps = callback_ops;
892
893    mChannelHandle = mCameraHandle->ops->add_channel(
894            mCameraHandle->camera_handle, NULL, NULL, this);
895    if (mChannelHandle == 0) {
896        LOGE("add_channel failed");
897        rc = -ENOMEM;
898        pthread_mutex_unlock(&mMutex);
899        return rc;
900    }
901
902    pthread_mutex_unlock(&mMutex);
903    mCameraInitialized = true;
904    mState = INITIALIZED;
905    LOGI("X");
906    return 0;
907
908err1:
909    pthread_mutex_unlock(&mMutex);
910    return rc;
911}
912
913/*===========================================================================
914 * FUNCTION   : validateStreamDimensions
915 *
916 * DESCRIPTION: Check if the configuration requested are those advertised
917 *
918 * PARAMETERS :
919 *   @stream_list : streams to be configured
920 *
921 * RETURN     :
922 *
923 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    // At most one input stream is allowed per camera3 configuration.
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the buffer is allocated transposed, so
        // validate the swapped dimensions against the capability tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL / input / bidirectional streams may match the full active
            // array size directly; otherwise fall through to the picture
            // sizes table below.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1035
1036/*==============================================================================
1037 * FUNCTION   : isSupportChannelNeeded
1038 *
1039 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1040 *
1041 * PARAMETERS :
1042 *   @stream_list : streams to be configured
1043 *   @stream_config_info : the config info for streams to be configured
1044 *
 * RETURN     : Boolean true/false decision
1046 *
1047 *==========================================================================*/
1048bool QCamera3HardwareInterface::isSupportChannelNeeded(
1049        camera3_stream_configuration_t *streamList,
1050        cam_stream_size_info_t stream_config_info)
1051{
1052    uint32_t i;
1053    bool pprocRequested = false;
1054    /* Check for conditions where PProc pipeline does not have any streams*/
1055    for (i = 0; i < stream_config_info.num_streams; i++) {
1056        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1057                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1058            pprocRequested = true;
1059            break;
1060        }
1061    }
1062
1063    if (pprocRequested == false )
1064        return true;
1065
1066    /* Dummy stream needed if only raw or jpeg streams present */
1067    for (i = 0; i < streamList->num_streams; i++) {
1068        switch(streamList->streams[i]->format) {
1069            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1070            case HAL_PIXEL_FORMAT_RAW10:
1071            case HAL_PIXEL_FORMAT_RAW16:
1072            case HAL_PIXEL_FORMAT_BLOB:
1073                break;
1074            default:
1075                return false;
1076        }
1077    }
1078    return true;
1079}
1080
1081/*==============================================================================
1082 * FUNCTION   : getSensorOutputSize
1083 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1085 *
1086 * PARAMETERS :
1087 *   @sensor_dim : sensor output dimension (output)
1088 *
1089 * RETURN     : int32_t type of status
1090 *              NO_ERROR  -- success
 *              non-zero failure code
1092 *
1093 *==========================================================================*/
1094int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1095{
1096    int32_t rc = NO_ERROR;
1097
1098    cam_dimension_t max_dim = {0, 0};
1099    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1100        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1101            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1102        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1103            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1104    }
1105
1106    clear_metadata_buffer(mParameters);
1107
1108    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1109            max_dim);
1110    if (rc != NO_ERROR) {
1111        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1112        return rc;
1113    }
1114
1115    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1116    if (rc != NO_ERROR) {
1117        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1118        return rc;
1119    }
1120
1121    clear_metadata_buffer(mParameters);
1122    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1123
1124    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1125            mParameters);
1126    if (rc != NO_ERROR) {
1127        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1128        return rc;
1129    }
1130
1131    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1132    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1133
1134    return rc;
1135}
1136
1137/*==============================================================================
1138 * FUNCTION   : enablePowerHint
1139 *
1140 * DESCRIPTION: enable single powerhint for preview and different video modes.
1141 *
1142 * PARAMETERS :
1143 *
1144 * RETURN     : NULL
1145 *
1146 *==========================================================================*/
1147void QCamera3HardwareInterface::enablePowerHint()
1148{
1149    if (!mPowerHintEnabled) {
1150        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1151        mPowerHintEnabled = true;
1152    }
1153}
1154
1155/*==============================================================================
1156 * FUNCTION   : disablePowerHint
1157 *
1158 * DESCRIPTION: disable current powerhint.
1159 *
1160 * PARAMETERS :
1161 *
1162 * RETURN     : NULL
1163 *
1164 *==========================================================================*/
1165void QCamera3HardwareInterface::disablePowerHint()
1166{
1167    if (mPowerHintEnabled) {
1168        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1169        mPowerHintEnabled = false;
1170    }
1171}
1172
1173/*==============================================================================
1174 * FUNCTION   : addToPPFeatureMask
1175 *
1176 * DESCRIPTION: add additional features to pp feature mask based on
1177 *              stream type and usecase
1178 *
1179 * PARAMETERS :
1180 *   @stream_format : stream type for feature mask
1181 *   @stream_idx : stream idx within postprocess_mask list to change
1182 *
1183 * RETURN     : NULL
1184 *
1185 *==========================================================================*/
1186void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1187        uint32_t stream_idx)
1188{
1189    char feature_mask_value[PROPERTY_VALUE_MAX];
1190    cam_feature_mask_t feature_mask;
1191    int args_converted;
1192    int property_len;
1193
1194    /* Get feature mask from property */
1195    property_len = property_get("persist.camera.hal3.feature",
1196            feature_mask_value, "0");
1197    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1198            (feature_mask_value[1] == 'x')) {
1199        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1200    } else {
1201        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1202    }
1203    if (1 != args_converted) {
1204        feature_mask = 0;
1205        LOGE("Wrong feature mask %s", feature_mask_value);
1206        return;
1207    }
1208
1209    switch (stream_format) {
1210    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1211        /* Add LLVD to pp feature mask only if video hint is enabled */
1212        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1213            mStreamConfigInfo.postprocess_mask[stream_idx]
1214                    |= CAM_QTI_FEATURE_SW_TNR;
1215            LOGH("Added SW TNR to pp feature mask");
1216        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1217            mStreamConfigInfo.postprocess_mask[stream_idx]
1218                    |= CAM_QCOM_FEATURE_LLVD;
1219            LOGH("Added LLVD SeeMore to pp feature mask");
1220        }
1221        break;
1222    }
1223    default:
1224        break;
1225    }
1226    LOGD("PP feature mask %llx",
1227            mStreamConfigInfo.postprocess_mask[stream_idx]);
1228}
1229
1230/*==============================================================================
1231 * FUNCTION   : updateFpsInPreviewBuffer
1232 *
1233 * DESCRIPTION: update FPS information in preview buffer.
1234 *
1235 * PARAMETERS :
1236 *   @metadata    : pointer to metadata buffer
1237 *   @frame_number: frame_number to look for in pending buffer list
1238 *
1239 * RETURN     : None
1240 *
1241 *==========================================================================*/
1242void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1243        uint32_t frame_number)
1244{
1245    // Mark all pending buffers for this particular request
1246    // with corresponding framerate information
1247    for (List<PendingBuffersInRequest>::iterator req =
1248            mPendingBuffersMap.mPendingBuffersInRequest.begin();
1249            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1250        for(List<PendingBufferInfo>::iterator j =
1251                req->mPendingBufferList.begin();
1252                j != req->mPendingBufferList.end(); j++) {
1253            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1254            if ((req->frame_number == frame_number) &&
1255                (channel->getStreamTypeMask() &
1256                (1U << CAM_STREAM_TYPE_PREVIEW))) {
1257                IF_META_AVAILABLE(cam_fps_range_t, float_range,
1258                    CAM_INTF_PARM_FPS_RANGE, metadata) {
1259                    int32_t cameraFps = float_range->max_fps;
1260                    struct private_handle_t *priv_handle =
1261                        (struct private_handle_t *)(*(j->buffer));
1262                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1263                }
1264            }
1265        }
1266    }
1267}
1268
1269/*===========================================================================
1270 * FUNCTION   : configureStreams
1271 *
1272 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1273 *              and output streams.
1274 *
1275 * PARAMETERS :
1276 *   @stream_list : streams to be configured
1277 *
1278 * RETURN     :
1279 *
1280 *==========================================================================*/
1281int QCamera3HardwareInterface::configureStreams(
1282        camera3_stream_configuration_t *streamList)
1283{
1284    ATRACE_CALL();
1285    int rc = 0;
1286
1287    // Acquire perfLock before configure streams
1288    m_perfLock.lock_acq();
1289    rc = configureStreamsPerfLocked(streamList);
1290    m_perfLock.lock_rel();
1291
1292    return rc;
1293}
1294
1295/*===========================================================================
1296 * FUNCTION   : configureStreamsPerfLocked
1297 *
1298 * DESCRIPTION: configureStreams while perfLock is held.
1299 *
1300 * PARAMETERS :
1301 *   @stream_list : streams to be configured
1302 *
1303 * RETURN     : int32_t type of status
1304 *              NO_ERROR  -- success
 *              non-zero failure code
1306 *==========================================================================*/
1307int QCamera3HardwareInterface::configureStreamsPerfLocked(
1308        camera3_stream_configuration_t *streamList)
1309{
1310    ATRACE_CALL();
1311    int rc = 0;
1312
1313    // Sanity check stream_list
1314    if (streamList == NULL) {
1315        LOGE("NULL stream configuration");
1316        return BAD_VALUE;
1317    }
1318    if (streamList->streams == NULL) {
1319        LOGE("NULL stream list");
1320        return BAD_VALUE;
1321    }
1322
1323    if (streamList->num_streams < 1) {
1324        LOGE("Bad number of streams requested: %d",
1325                streamList->num_streams);
1326        return BAD_VALUE;
1327    }
1328
1329    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1330        LOGE("Maximum number of streams %d exceeded: %d",
1331                MAX_NUM_STREAMS, streamList->num_streams);
1332        return BAD_VALUE;
1333    }
1334
1335    mOpMode = streamList->operation_mode;
1336    LOGD("mOpMode: %d", mOpMode);
1337
1338    /* first invalidate all the steams in the mStreamList
1339     * if they appear again, they will be validated */
1340    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1341            it != mStreamInfo.end(); it++) {
1342        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1343        channel->stop();
1344        (*it)->status = INVALID;
1345    }
1346
1347    if (mRawDumpChannel) {
1348        mRawDumpChannel->stop();
1349        delete mRawDumpChannel;
1350        mRawDumpChannel = NULL;
1351    }
1352
1353    if (mSupportChannel)
1354        mSupportChannel->stop();
1355
1356    if (mAnalysisChannel) {
1357        mAnalysisChannel->stop();
1358    }
1359    if (mMetadataChannel) {
1360        /* If content of mStreamInfo is not 0, there is metadata stream */
1361        mMetadataChannel->stop();
1362    }
1363    if (mChannelHandle) {
1364        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1365                mChannelHandle);
1366        LOGD("stopping channel %d", mChannelHandle);
1367    }
1368
1369    pthread_mutex_lock(&mMutex);
1370
1371    // Check state
1372    switch (mState) {
1373        case INITIALIZED:
1374        case CONFIGURED:
1375        case STARTED:
1376            /* valid state */
1377            break;
1378        default:
1379            LOGE("Invalid state %d", mState);
1380            pthread_mutex_unlock(&mMutex);
1381            return -ENODEV;
1382    }
1383
1384    /* Check whether we have video stream */
1385    m_bIs4KVideo = false;
1386    m_bIsVideo = false;
1387    m_bEisSupportedSize = false;
1388    m_bTnrEnabled = false;
1389    bool isZsl = false;
1390    uint32_t videoWidth = 0U;
1391    uint32_t videoHeight = 0U;
1392    size_t rawStreamCnt = 0;
1393    size_t stallStreamCnt = 0;
1394    size_t processedStreamCnt = 0;
1395    // Number of streams on ISP encoder path
1396    size_t numStreamsOnEncoder = 0;
1397    size_t numYuv888OnEncoder = 0;
1398    bool bYuv888OverrideJpeg = false;
1399    cam_dimension_t largeYuv888Size = {0, 0};
1400    cam_dimension_t maxViewfinderSize = {0, 0};
1401    bool bJpegExceeds4K = false;
1402    bool bUseCommonFeatureMask = false;
1403    cam_feature_mask_t commonFeatureMask = 0;
1404    bool bSmallJpegSize = false;
1405    uint32_t width_ratio;
1406    uint32_t height_ratio;
1407    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1408    camera3_stream_t *inputStream = NULL;
1409    bool isJpeg = false;
1410    cam_dimension_t jpegSize = {0, 0};
1411
1412    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1413
1414    /*EIS configuration*/
1415    bool eisSupported = false;
1416    bool oisSupported = false;
1417    int32_t margin_index = -1;
1418    uint8_t eis_prop_set;
1419    uint32_t maxEisWidth = 0;
1420    uint32_t maxEisHeight = 0;
1421
1422    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1423
1424    size_t count = IS_TYPE_MAX;
1425    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1426    for (size_t i = 0; i < count; i++) {
1427        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1428            eisSupported = true;
1429            margin_index = (int32_t)i;
1430            break;
1431        }
1432    }
1433
1434    count = CAM_OPT_STAB_MAX;
1435    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1436    for (size_t i = 0; i < count; i++) {
1437        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1438            oisSupported = true;
1439            break;
1440        }
1441    }
1442
1443    if (eisSupported) {
1444        maxEisWidth = MAX_EIS_WIDTH;
1445        maxEisHeight = MAX_EIS_HEIGHT;
1446    }
1447
1448    /* EIS setprop control */
1449    char eis_prop[PROPERTY_VALUE_MAX];
1450    memset(eis_prop, 0, sizeof(eis_prop));
1451    property_get("persist.camera.eis.enable", eis_prop, "0");
1452    eis_prop_set = (uint8_t)atoi(eis_prop);
1453
1454    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1455            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1456
1457    /* stream configurations */
1458    for (size_t i = 0; i < streamList->num_streams; i++) {
1459        camera3_stream_t *newStream = streamList->streams[i];
1460        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1461                "height = %d, rotation = %d, usage = 0x%x",
1462                 i, newStream->stream_type, newStream->format,
1463                newStream->width, newStream->height, newStream->rotation,
1464                newStream->usage);
1465        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1466                newStream->stream_type == CAMERA3_STREAM_INPUT){
1467            isZsl = true;
1468        }
1469        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1470            inputStream = newStream;
1471        }
1472
1473        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1474            isJpeg = true;
1475            jpegSize.width = newStream->width;
1476            jpegSize.height = newStream->height;
1477            if (newStream->width > VIDEO_4K_WIDTH ||
1478                    newStream->height > VIDEO_4K_HEIGHT)
1479                bJpegExceeds4K = true;
1480        }
1481
1482        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1483                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1484            m_bIsVideo = true;
1485            videoWidth = newStream->width;
1486            videoHeight = newStream->height;
1487            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1488                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1489                m_bIs4KVideo = true;
1490            }
1491            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1492                                  (newStream->height <= maxEisHeight);
1493        }
1494        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1495                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1496            switch (newStream->format) {
1497            case HAL_PIXEL_FORMAT_BLOB:
1498                stallStreamCnt++;
1499                if (isOnEncoder(maxViewfinderSize, newStream->width,
1500                        newStream->height)) {
1501                    numStreamsOnEncoder++;
1502                }
1503                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1504                        newStream->width);
1505                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1506                        newStream->height);;
1507                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1508                        "FATAL: max_downscale_factor cannot be zero and so assert");
1509                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1510                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1511                    LOGH("Setting small jpeg size flag to true");
1512                    bSmallJpegSize = true;
1513                }
1514                break;
1515            case HAL_PIXEL_FORMAT_RAW10:
1516            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1517            case HAL_PIXEL_FORMAT_RAW16:
1518                rawStreamCnt++;
1519                break;
1520            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1521                processedStreamCnt++;
1522                if (isOnEncoder(maxViewfinderSize, newStream->width,
1523                        newStream->height)) {
1524                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1525                            !IS_USAGE_ZSL(newStream->usage)) {
1526                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1527                    }
1528                    numStreamsOnEncoder++;
1529                }
1530                break;
1531            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1532                processedStreamCnt++;
1533                if (isOnEncoder(maxViewfinderSize, newStream->width,
1534                        newStream->height)) {
1535                    // If Yuv888 size is not greater than 4K, set feature mask
1536                    // to SUPERSET so that it support concurrent request on
1537                    // YUV and JPEG.
1538                    if (newStream->width <= VIDEO_4K_WIDTH &&
1539                            newStream->height <= VIDEO_4K_HEIGHT) {
1540                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1541                    }
1542                    numStreamsOnEncoder++;
1543                    numYuv888OnEncoder++;
1544                    largeYuv888Size.width = newStream->width;
1545                    largeYuv888Size.height = newStream->height;
1546                }
1547                break;
1548            default:
1549                processedStreamCnt++;
1550                if (isOnEncoder(maxViewfinderSize, newStream->width,
1551                        newStream->height)) {
1552                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1553                    numStreamsOnEncoder++;
1554                }
1555                break;
1556            }
1557
1558        }
1559    }
1560
1561    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1562        !m_bIsVideo) {
1563        m_bEisEnable = false;
1564    }
1565
1566    /* Logic to enable/disable TNR based on specific config size/etc.*/
1567    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1568            ((videoWidth == 1920 && videoHeight == 1080) ||
1569            (videoWidth == 1280 && videoHeight == 720)) &&
1570            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1571        m_bTnrEnabled = true;
1572
1573    /* Check if num_streams is sane */
1574    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1575            rawStreamCnt > MAX_RAW_STREAMS ||
1576            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1577        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1578                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1579        pthread_mutex_unlock(&mMutex);
1580        return -EINVAL;
1581    }
1582    /* Check whether we have zsl stream or 4k video case */
1583    if (isZsl && m_bIsVideo) {
1584        LOGE("Currently invalid configuration ZSL&Video!");
1585        pthread_mutex_unlock(&mMutex);
1586        return -EINVAL;
1587    }
1588    /* Check if stream sizes are sane */
1589    if (numStreamsOnEncoder > 2) {
1590        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1591        pthread_mutex_unlock(&mMutex);
1592        return -EINVAL;
1593    } else if (1 < numStreamsOnEncoder){
1594        bUseCommonFeatureMask = true;
1595        LOGH("Multiple streams above max viewfinder size, common mask needed");
1596    }
1597
1598    /* Check if BLOB size is greater than 4k in 4k recording case */
1599    if (m_bIs4KVideo && bJpegExceeds4K) {
1600        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1601        pthread_mutex_unlock(&mMutex);
1602        return -EINVAL;
1603    }
1604
    // If a JPEG stream is available, a YUV 888 stream is on the encoder path, and
    // the YUV stream's size is strictly greater than the JPEG size, set the common
    // postprocess mask to NONE so that we can take advantage of postproc bypass.
    // NOTE(review): this comment previously said "greater or equal", but the check
    // below uses strict '>' on both dimensions — confirm which is intended.
1608    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1609            jpegSize.width, jpegSize.height) &&
1610            largeYuv888Size.width > jpegSize.width &&
1611            largeYuv888Size.height > jpegSize.height) {
1612        bYuv888OverrideJpeg = true;
1613    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1614        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1615    }
1616
1617    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1618            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1619            commonFeatureMask);
1620    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1621            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1622
1623    rc = validateStreamDimensions(streamList);
1624    if (rc == NO_ERROR) {
1625        rc = validateStreamRotations(streamList);
1626    }
1627    if (rc != NO_ERROR) {
1628        LOGE("Invalid stream configuration requested!");
1629        pthread_mutex_unlock(&mMutex);
1630        return rc;
1631    }
1632
1633    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1634    camera3_stream_t *jpegStream = NULL;
1635    for (size_t i = 0; i < streamList->num_streams; i++) {
1636        camera3_stream_t *newStream = streamList->streams[i];
1637        LOGH("newStream type = %d, stream format = %d "
1638                "stream size : %d x %d, stream rotation = %d",
1639                 newStream->stream_type, newStream->format,
1640                newStream->width, newStream->height, newStream->rotation);
1641        //if the stream is in the mStreamList validate it
1642        bool stream_exists = false;
1643        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1644                it != mStreamInfo.end(); it++) {
1645            if ((*it)->stream == newStream) {
1646                QCamera3ProcessingChannel *channel =
1647                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1648                stream_exists = true;
1649                if (channel)
1650                    delete channel;
1651                (*it)->status = VALID;
1652                (*it)->stream->priv = NULL;
1653                (*it)->channel = NULL;
1654            }
1655        }
1656        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1657            //new stream
1658            stream_info_t* stream_info;
1659            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1660            if (!stream_info) {
1661               LOGE("Could not allocate stream info");
1662               rc = -ENOMEM;
1663               pthread_mutex_unlock(&mMutex);
1664               return rc;
1665            }
1666            stream_info->stream = newStream;
1667            stream_info->status = VALID;
1668            stream_info->channel = NULL;
1669            mStreamInfo.push_back(stream_info);
1670        }
1671        /* Covers Opaque ZSL and API1 F/W ZSL */
1672        if (IS_USAGE_ZSL(newStream->usage)
1673                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1674            if (zslStream != NULL) {
1675                LOGE("Multiple input/reprocess streams requested!");
1676                pthread_mutex_unlock(&mMutex);
1677                return BAD_VALUE;
1678            }
1679            zslStream = newStream;
1680        }
1681        /* Covers YUV reprocess */
1682        if (inputStream != NULL) {
1683            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1684                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1685                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1686                    && inputStream->width == newStream->width
1687                    && inputStream->height == newStream->height) {
1688                if (zslStream != NULL) {
                    /* This scenario indicates that multiple YUV streams with the
                     * same size as the input stream have been requested. Since the
                     * zsl stream handle is used solely to override the size of
                     * streams that share h/w streams, we simply make a guess here
                     * as to which stream is the ZSL stream. This will be refactored
                     * once we have generic logic for streams sharing encoder output.
                     */
1696                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1697                }
1698                zslStream = newStream;
1699            }
1700        }
1701        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1702            jpegStream = newStream;
1703        }
1704    }
1705
1706    /* If a zsl stream is set, we know that we have configured at least one input or
1707       bidirectional stream */
1708    if (NULL != zslStream) {
1709        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1710        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1711        mInputStreamInfo.format = zslStream->format;
1712        mInputStreamInfo.usage = zslStream->usage;
1713        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1714                 mInputStreamInfo.dim.width,
1715                mInputStreamInfo.dim.height,
1716                mInputStreamInfo.format, mInputStreamInfo.usage);
1717    }
1718
1719    cleanAndSortStreamInfo();
1720    if (mMetadataChannel) {
1721        delete mMetadataChannel;
1722        mMetadataChannel = NULL;
1723    }
1724    if (mSupportChannel) {
1725        delete mSupportChannel;
1726        mSupportChannel = NULL;
1727    }
1728
1729    if (mAnalysisChannel) {
1730        delete mAnalysisChannel;
1731        mAnalysisChannel = NULL;
1732    }
1733
1734    if (mDummyBatchChannel) {
1735        delete mDummyBatchChannel;
1736        mDummyBatchChannel = NULL;
1737    }
1738
1739    //Create metadata channel and initialize it
1740    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1741    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1742            gCamCapability[mCameraId]->color_arrangement);
1743    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1744                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1745                    &padding_info, metadataFeatureMask, this);
1746    if (mMetadataChannel == NULL) {
1747        LOGE("failed to allocate metadata channel");
1748        rc = -ENOMEM;
1749        pthread_mutex_unlock(&mMutex);
1750        return rc;
1751    }
1752    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1753    if (rc < 0) {
1754        LOGE("metadata channel initialization failed");
1755        delete mMetadataChannel;
1756        mMetadataChannel = NULL;
1757        pthread_mutex_unlock(&mMutex);
1758        return rc;
1759    }
1760
1761    // Create analysis stream all the time, even when h/w support is not available
1762    {
1763        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1764        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1765                gCamCapability[mCameraId]->color_arrangement);
1766        cam_analysis_info_t analysisInfo;
1767        rc = mCommon.getAnalysisInfo(
1768                FALSE,
1769                TRUE,
1770                analysisFeatureMask,
1771                &analysisInfo);
1772        if (rc != NO_ERROR) {
1773            LOGE("getAnalysisInfo failed, ret = %d", rc);
1774            pthread_mutex_unlock(&mMutex);
1775            return rc;
1776        }
1777
1778        mAnalysisChannel = new QCamera3SupportChannel(
1779                mCameraHandle->camera_handle,
1780                mChannelHandle,
1781                mCameraHandle->ops,
1782                &analysisInfo.analysis_padding_info,
1783                analysisFeatureMask,
1784                CAM_STREAM_TYPE_ANALYSIS,
1785                &analysisInfo.analysis_max_res,
1786                (analysisInfo.analysis_format
1787                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1788                : CAM_FORMAT_YUV_420_NV21),
1789                analysisInfo.hw_analysis_supported,
1790                this,
1791                0); // force buffer count to 0
1792        if (!mAnalysisChannel) {
1793            LOGE("H/W Analysis channel cannot be created");
1794            pthread_mutex_unlock(&mMutex);
1795            return -ENOMEM;
1796        }
1797    }
1798
1799    bool isRawStreamRequested = false;
1800    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1801    /* Allocate channel objects for the requested streams */
1802    for (size_t i = 0; i < streamList->num_streams; i++) {
1803        camera3_stream_t *newStream = streamList->streams[i];
1804        uint32_t stream_usage = newStream->usage;
1805        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1806        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1807        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1808                || IS_USAGE_ZSL(newStream->usage)) &&
1809            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1810            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1811            if (bUseCommonFeatureMask) {
1812                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1813                        commonFeatureMask;
1814            } else {
1815                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1816                        CAM_QCOM_FEATURE_NONE;
1817            }
1818
1819        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1820                LOGH("Input stream configured, reprocess config");
1821        } else {
1822            //for non zsl streams find out the format
1823            switch (newStream->format) {
1824            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1825            {
1826                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1827                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1828                /* add additional features to pp feature mask */
1829                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1830                        mStreamConfigInfo.num_streams);
1831
1832                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1833                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1834                                CAM_STREAM_TYPE_VIDEO;
1835                    if (m_bTnrEnabled && m_bTnrVideo) {
1836                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1837                            CAM_QCOM_FEATURE_CPP_TNR;
1838                    }
1839                } else {
1840                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1841                            CAM_STREAM_TYPE_PREVIEW;
1842                    if (m_bTnrEnabled && m_bTnrPreview) {
1843                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1844                                CAM_QCOM_FEATURE_CPP_TNR;
1845                    }
1846                    padding_info.width_padding = mSurfaceStridePadding;
1847                    padding_info.height_padding = CAM_PAD_TO_2;
1848                }
1849                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1850                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1851                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1852                            newStream->height;
1853                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1854                            newStream->width;
1855                }
1856            }
1857            break;
1858            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1859                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1860                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1861                    if (bUseCommonFeatureMask)
1862                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1863                                commonFeatureMask;
1864                    else
1865                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1866                                CAM_QCOM_FEATURE_NONE;
1867                } else {
1868                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1869                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1870                }
1871            break;
1872            case HAL_PIXEL_FORMAT_BLOB:
1873                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1874                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1875                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1876                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1877                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1878                } else {
1879                    if (bUseCommonFeatureMask &&
1880                            isOnEncoder(maxViewfinderSize, newStream->width,
1881                            newStream->height)) {
1882                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1883                    } else {
1884                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1885                    }
1886                }
1887                if (isZsl) {
1888                    if (zslStream) {
1889                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1890                                (int32_t)zslStream->width;
1891                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1892                                (int32_t)zslStream->height;
1893                    } else {
1894                        LOGE("Error, No ZSL stream identified");
1895                        pthread_mutex_unlock(&mMutex);
1896                        return -EINVAL;
1897                    }
1898                } else if (m_bIs4KVideo) {
1899                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1900                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1901                } else if (bYuv888OverrideJpeg) {
1902                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1903                            (int32_t)largeYuv888Size.width;
1904                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1905                            (int32_t)largeYuv888Size.height;
1906                }
1907                break;
1908            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1909            case HAL_PIXEL_FORMAT_RAW16:
1910            case HAL_PIXEL_FORMAT_RAW10:
1911                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1912                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1913                isRawStreamRequested = true;
1914                break;
1915            default:
1916                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1917                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1918                break;
1919            }
1920        }
1921
1922        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1923                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1924                gCamCapability[mCameraId]->color_arrangement);
1925
1926        if (newStream->priv == NULL) {
1927            //New stream, construct channel
1928            switch (newStream->stream_type) {
1929            case CAMERA3_STREAM_INPUT:
1930                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1931                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1932                break;
1933            case CAMERA3_STREAM_BIDIRECTIONAL:
1934                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1935                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1936                break;
1937            case CAMERA3_STREAM_OUTPUT:
1938                /* For video encoding stream, set read/write rarely
1939                 * flag so that they may be set to un-cached */
1940                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1941                    newStream->usage |=
1942                         (GRALLOC_USAGE_SW_READ_RARELY |
1943                         GRALLOC_USAGE_SW_WRITE_RARELY |
1944                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1945                else if (IS_USAGE_ZSL(newStream->usage))
1946                {
1947                    LOGD("ZSL usage flag skipping");
1948                }
1949                else if (newStream == zslStream
1950                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1951                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1952                } else
1953                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1954                break;
1955            default:
1956                LOGE("Invalid stream_type %d", newStream->stream_type);
1957                break;
1958            }
1959
1960            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1961                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1962                QCamera3ProcessingChannel *channel = NULL;
1963                switch (newStream->format) {
1964                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1965                    if ((newStream->usage &
1966                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1967                            (streamList->operation_mode ==
1968                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1969                    ) {
1970                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1971                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1972                                &gCamCapability[mCameraId]->padding_info,
1973                                this,
1974                                newStream,
1975                                (cam_stream_type_t)
1976                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1977                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1978                                mMetadataChannel,
1979                                0); //heap buffers are not required for HFR video channel
1980                        if (channel == NULL) {
1981                            LOGE("allocation of channel failed");
1982                            pthread_mutex_unlock(&mMutex);
1983                            return -ENOMEM;
1984                        }
                        //channel->getNumBuffers() will return 0 here so use
                        //MAX_INFLIGHT_HFR_REQUESTS
1987                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1988                        newStream->priv = channel;
1989                        LOGI("num video buffers in HFR mode: %d",
1990                                 MAX_INFLIGHT_HFR_REQUESTS);
1991                    } else {
1992                        /* Copy stream contents in HFR preview only case to create
1993                         * dummy batch channel so that sensor streaming is in
1994                         * HFR mode */
1995                        if (!m_bIsVideo && (streamList->operation_mode ==
1996                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1997                            mDummyBatchStream = *newStream;
1998                        }
1999                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2000                                mChannelHandle, mCameraHandle->ops, captureResultCb,
2001                                &gCamCapability[mCameraId]->padding_info,
2002                                this,
2003                                newStream,
2004                                (cam_stream_type_t)
2005                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2006                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2007                                mMetadataChannel,
2008                                MAX_INFLIGHT_REQUESTS);
2009                        if (channel == NULL) {
2010                            LOGE("allocation of channel failed");
2011                            pthread_mutex_unlock(&mMutex);
2012                            return -ENOMEM;
2013                        }
2014                        newStream->max_buffers = channel->getNumBuffers();
2015                        newStream->priv = channel;
2016                    }
2017                    break;
2018                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2019                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2020                            mChannelHandle,
2021                            mCameraHandle->ops, captureResultCb,
2022                            &padding_info,
2023                            this,
2024                            newStream,
2025                            (cam_stream_type_t)
2026                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2027                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2028                            mMetadataChannel);
2029                    if (channel == NULL) {
2030                        LOGE("allocation of YUV channel failed");
2031                        pthread_mutex_unlock(&mMutex);
2032                        return -ENOMEM;
2033                    }
2034                    newStream->max_buffers = channel->getNumBuffers();
2035                    newStream->priv = channel;
2036                    break;
2037                }
2038                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2039                case HAL_PIXEL_FORMAT_RAW16:
2040                case HAL_PIXEL_FORMAT_RAW10:
2041                    mRawChannel = new QCamera3RawChannel(
2042                            mCameraHandle->camera_handle, mChannelHandle,
2043                            mCameraHandle->ops, captureResultCb,
2044                            &padding_info,
2045                            this, newStream,
2046                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2047                            mMetadataChannel,
2048                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2049                    if (mRawChannel == NULL) {
2050                        LOGE("allocation of raw channel failed");
2051                        pthread_mutex_unlock(&mMutex);
2052                        return -ENOMEM;
2053                    }
2054                    newStream->max_buffers = mRawChannel->getNumBuffers();
2055                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2056                    break;
2057                case HAL_PIXEL_FORMAT_BLOB:
2058                    // Max live snapshot inflight buffer is 1. This is to mitigate
2059                    // frame drop issues for video snapshot. The more buffers being
2060                    // allocated, the more frame drops there are.
2061                    mPictureChannel = new QCamera3PicChannel(
2062                            mCameraHandle->camera_handle, mChannelHandle,
2063                            mCameraHandle->ops, captureResultCb,
2064                            &padding_info, this, newStream,
2065                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2066                            m_bIs4KVideo, isZsl, mMetadataChannel,
2067                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2068                    if (mPictureChannel == NULL) {
2069                        LOGE("allocation of channel failed");
2070                        pthread_mutex_unlock(&mMutex);
2071                        return -ENOMEM;
2072                    }
2073                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2074                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2075                    mPictureChannel->overrideYuvSize(
2076                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2077                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2078                    break;
2079
2080                default:
2081                    LOGE("not a supported format 0x%x", newStream->format);
2082                    break;
2083                }
2084            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2085                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2086            } else {
2087                LOGE("Error, Unknown stream type");
2088                pthread_mutex_unlock(&mMutex);
2089                return -EINVAL;
2090            }
2091
2092            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2093            if (channel != NULL && channel->isUBWCEnabled()) {
2094                cam_format_t fmt = channel->getStreamDefaultFormat(
2095                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2096                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2097                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2098                }
2099            }
2100
2101            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2102                    it != mStreamInfo.end(); it++) {
2103                if ((*it)->stream == newStream) {
2104                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2105                    break;
2106                }
2107            }
2108        } else {
2109            // Channel already exists for this stream
2110            // Do nothing for now
2111        }
2112        padding_info = gCamCapability[mCameraId]->padding_info;
2113
2114        /* Do not add entries for input stream in metastream info
2115         * since there is no real stream associated with it
2116         */
2117        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2118            mStreamConfigInfo.num_streams++;
2119    }
2120
2121    //RAW DUMP channel
2122    if (mEnableRawDump && isRawStreamRequested == false){
2123        cam_dimension_t rawDumpSize;
2124        rawDumpSize = getMaxRawSize(mCameraId);
2125        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2126        setPAAFSupport(rawDumpFeatureMask,
2127                CAM_STREAM_TYPE_RAW,
2128                gCamCapability[mCameraId]->color_arrangement);
2129        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2130                                  mChannelHandle,
2131                                  mCameraHandle->ops,
2132                                  rawDumpSize,
2133                                  &padding_info,
2134                                  this, rawDumpFeatureMask);
2135        if (!mRawDumpChannel) {
2136            LOGE("Raw Dump channel cannot be created");
2137            pthread_mutex_unlock(&mMutex);
2138            return -ENOMEM;
2139        }
2140    }
2141
2142
2143    if (mAnalysisChannel) {
2144        cam_analysis_info_t analysisInfo;
2145        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2146        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2147                CAM_STREAM_TYPE_ANALYSIS;
2148        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2149                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2150        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2151                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2152                gCamCapability[mCameraId]->color_arrangement);
2153        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2154                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2155                &analysisInfo);
2156        if (rc != NO_ERROR) {
2157            LOGE("getAnalysisInfo failed, ret = %d", rc);
2158            pthread_mutex_unlock(&mMutex);
2159            return rc;
2160        }
2161        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2162                analysisInfo.analysis_max_res;
2163        mStreamConfigInfo.num_streams++;
2164    }
2165
2166    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2167        cam_analysis_info_t supportInfo;
2168        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2169        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2170        setPAAFSupport(callbackFeatureMask,
2171                CAM_STREAM_TYPE_CALLBACK,
2172                gCamCapability[mCameraId]->color_arrangement);
2173        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2174        if (rc != NO_ERROR) {
2175            LOGE("getAnalysisInfo failed, ret = %d", rc);
2176            pthread_mutex_unlock(&mMutex);
2177            return rc;
2178        }
2179        mSupportChannel = new QCamera3SupportChannel(
2180                mCameraHandle->camera_handle,
2181                mChannelHandle,
2182                mCameraHandle->ops,
2183                &gCamCapability[mCameraId]->padding_info,
2184                callbackFeatureMask,
2185                CAM_STREAM_TYPE_CALLBACK,
2186                &QCamera3SupportChannel::kDim,
2187                CAM_FORMAT_YUV_420_NV21,
2188                supportInfo.hw_analysis_supported,
2189                this);
2190        if (!mSupportChannel) {
2191            LOGE("dummy channel cannot be created");
2192            pthread_mutex_unlock(&mMutex);
2193            return -ENOMEM;
2194        }
2195    }
2196
2197    if (mSupportChannel) {
2198        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2199                QCamera3SupportChannel::kDim;
2200        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2201                CAM_STREAM_TYPE_CALLBACK;
2202        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2203                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2204        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2205                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2206                gCamCapability[mCameraId]->color_arrangement);
2207        mStreamConfigInfo.num_streams++;
2208    }
2209
2210    if (mRawDumpChannel) {
2211        cam_dimension_t rawSize;
2212        rawSize = getMaxRawSize(mCameraId);
2213        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2214                rawSize;
2215        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2216                CAM_STREAM_TYPE_RAW;
2217        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2218                CAM_QCOM_FEATURE_NONE;
2219        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2220                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2221                gCamCapability[mCameraId]->color_arrangement);
2222        mStreamConfigInfo.num_streams++;
2223    }
2224    /* In HFR mode, if video stream is not added, create a dummy channel so that
2225     * ISP can create a batch mode even for preview only case. This channel is
2226     * never 'start'ed (no stream-on), it is only 'initialized'  */
2227    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2228            !m_bIsVideo) {
2229        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2230        setPAAFSupport(dummyFeatureMask,
2231                CAM_STREAM_TYPE_VIDEO,
2232                gCamCapability[mCameraId]->color_arrangement);
2233        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2234                mChannelHandle,
2235                mCameraHandle->ops, captureResultCb,
2236                &gCamCapability[mCameraId]->padding_info,
2237                this,
2238                &mDummyBatchStream,
2239                CAM_STREAM_TYPE_VIDEO,
2240                dummyFeatureMask,
2241                mMetadataChannel);
2242        if (NULL == mDummyBatchChannel) {
2243            LOGE("creation of mDummyBatchChannel failed."
2244                    "Preview will use non-hfr sensor mode ");
2245        }
2246    }
2247    if (mDummyBatchChannel) {
2248        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2249                mDummyBatchStream.width;
2250        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2251                mDummyBatchStream.height;
2252        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2253                CAM_STREAM_TYPE_VIDEO;
2254        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2255                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2256        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2257                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2258                gCamCapability[mCameraId]->color_arrangement);
2259        mStreamConfigInfo.num_streams++;
2260    }
2261
2262    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2263    mStreamConfigInfo.buffer_info.max_buffers =
2264            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2265
2266    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2267    for (pendingRequestIterator i = mPendingRequestsList.begin();
2268            i != mPendingRequestsList.end();) {
2269        i = erasePendingRequest(i);
2270    }
2271    mPendingFrameDropList.clear();
2272    // Initialize/Reset the pending buffers list
2273    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2274        req.mPendingBufferList.clear();
2275    }
2276    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2277
2278    mPendingReprocessResultList.clear();
2279
2280    mCurJpegMeta.clear();
2281    //Get min frame duration for this streams configuration
2282    deriveMinFrameDuration();
2283
2284    // Update state
2285    mState = CONFIGURED;
2286
2287    pthread_mutex_unlock(&mMutex);
2288
2289    return rc;
2290}
2291
2292/*===========================================================================
2293 * FUNCTION   : validateCaptureRequest
2294 *
2295 * DESCRIPTION: validate a capture request from camera service
2296 *
2297 * PARAMETERS :
2298 *   @request : request from framework to process
2299 *
2300 * RETURN     :
2301 *
2302 *==========================================================================*/
2303int QCamera3HardwareInterface::validateCaptureRequest(
2304                    camera3_capture_request_t *request)
2305{
2306    ssize_t idx = 0;
2307    const camera3_stream_buffer_t *b;
2308    CameraMetadata meta;
2309
2310    /* Sanity check the request */
2311    if (request == NULL) {
2312        LOGE("NULL capture request");
2313        return BAD_VALUE;
2314    }
2315
2316    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2317        /*settings cannot be null for the first request*/
2318        return BAD_VALUE;
2319    }
2320
2321    uint32_t frameNumber = request->frame_number;
2322    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2323        LOGE("Request %d: No output buffers provided!",
2324                __FUNCTION__, frameNumber);
2325        return BAD_VALUE;
2326    }
2327    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2328        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2329                 request->num_output_buffers, MAX_NUM_STREAMS);
2330        return BAD_VALUE;
2331    }
2332    if (request->input_buffer != NULL) {
2333        b = request->input_buffer;
2334        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2335            LOGE("Request %d: Buffer %ld: Status not OK!",
2336                     frameNumber, (long)idx);
2337            return BAD_VALUE;
2338        }
2339        if (b->release_fence != -1) {
2340            LOGE("Request %d: Buffer %ld: Has a release fence!",
2341                     frameNumber, (long)idx);
2342            return BAD_VALUE;
2343        }
2344        if (b->buffer == NULL) {
2345            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2346                     frameNumber, (long)idx);
2347            return BAD_VALUE;
2348        }
2349    }
2350
2351    // Validate all buffers
2352    b = request->output_buffers;
2353    do {
2354        QCamera3ProcessingChannel *channel =
2355                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2356        if (channel == NULL) {
2357            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2358                     frameNumber, (long)idx);
2359            return BAD_VALUE;
2360        }
2361        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2362            LOGE("Request %d: Buffer %ld: Status not OK!",
2363                     frameNumber, (long)idx);
2364            return BAD_VALUE;
2365        }
2366        if (b->release_fence != -1) {
2367            LOGE("Request %d: Buffer %ld: Has a release fence!",
2368                     frameNumber, (long)idx);
2369            return BAD_VALUE;
2370        }
2371        if (b->buffer == NULL) {
2372            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2373                     frameNumber, (long)idx);
2374            return BAD_VALUE;
2375        }
2376        if (*(b->buffer) == NULL) {
2377            LOGE("Request %d: Buffer %ld: NULL private handle!",
2378                     frameNumber, (long)idx);
2379            return BAD_VALUE;
2380        }
2381        idx++;
2382        b = request->output_buffers + idx;
2383    } while (idx < (ssize_t)request->num_output_buffers);
2384
2385    return NO_ERROR;
2386}
2387
2388/*===========================================================================
2389 * FUNCTION   : deriveMinFrameDuration
2390 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2392 *              on currently configured streams.
2393 *
2394 * PARAMETERS : NONE
2395 *
2396 * RETURN     : NONE
2397 *
2398 *==========================================================================*/
2399void QCamera3HardwareInterface::deriveMinFrameDuration()
2400{
2401    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2402
2403    maxJpegDim = 0;
2404    maxProcessedDim = 0;
2405    maxRawDim = 0;
2406
2407    // Figure out maximum jpeg, processed, and raw dimensions
2408    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2409        it != mStreamInfo.end(); it++) {
2410
2411        // Input stream doesn't have valid stream_type
2412        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2413            continue;
2414
2415        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2416        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2417            if (dimension > maxJpegDim)
2418                maxJpegDim = dimension;
2419        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2420                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2421                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2422            if (dimension > maxRawDim)
2423                maxRawDim = dimension;
2424        } else {
2425            if (dimension > maxProcessedDim)
2426                maxProcessedDim = dimension;
2427        }
2428    }
2429
2430    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2431            MAX_SIZES_CNT);
2432
2433    //Assume all jpeg dimensions are in processed dimensions.
2434    if (maxJpegDim > maxProcessedDim)
2435        maxProcessedDim = maxJpegDim;
2436    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2437    if (maxProcessedDim > maxRawDim) {
2438        maxRawDim = INT32_MAX;
2439
2440        for (size_t i = 0; i < count; i++) {
2441            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2442                    gCamCapability[mCameraId]->raw_dim[i].height;
2443            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2444                maxRawDim = dimension;
2445        }
2446    }
2447
2448    //Find minimum durations for processed, jpeg, and raw
2449    for (size_t i = 0; i < count; i++) {
2450        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2451                gCamCapability[mCameraId]->raw_dim[i].height) {
2452            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2453            break;
2454        }
2455    }
2456    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2457    for (size_t i = 0; i < count; i++) {
2458        if (maxProcessedDim ==
2459                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2460                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2461            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2462            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2463            break;
2464        }
2465    }
2466}
2467
2468/*===========================================================================
2469 * FUNCTION   : getMinFrameDuration
2470 *
 * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame durations
2472 *              and current request configuration.
2473 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2477 *
2478 *==========================================================================*/
2479int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2480{
2481    bool hasJpegStream = false;
2482    bool hasRawStream = false;
2483    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2484        const camera3_stream_t *stream = request->output_buffers[i].stream;
2485        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2486            hasJpegStream = true;
2487        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2488                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2489                stream->format == HAL_PIXEL_FORMAT_RAW16)
2490            hasRawStream = true;
2491    }
2492
2493    if (!hasJpegStream)
2494        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2495    else
2496        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2497}
2498
2499/*===========================================================================
2500 * FUNCTION   : handleBuffersDuringFlushLock
2501 *
2502 * DESCRIPTION: Account for buffers returned from back-end during flush
2503 *              This function is executed while mMutex is held by the caller.
2504 *
2505 * PARAMETERS :
2506 *   @buffer: image buffer for the callback
2507 *
2508 * RETURN     :
2509 *==========================================================================*/
2510void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2511{
2512    bool buffer_found = false;
2513    for (List<PendingBuffersInRequest>::iterator req =
2514            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2515            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2516        for (List<PendingBufferInfo>::iterator i =
2517                req->mPendingBufferList.begin();
2518                i != req->mPendingBufferList.end(); i++) {
2519            if (i->buffer == buffer->buffer) {
2520                mPendingBuffersMap.numPendingBufsAtFlush--;
2521                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2522                    buffer->buffer, req->frame_number,
2523                    mPendingBuffersMap.numPendingBufsAtFlush);
2524                buffer_found = true;
2525                break;
2526            }
2527        }
2528        if (buffer_found) {
2529            break;
2530        }
2531    }
2532    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2533        //signal the flush()
2534        LOGD("All buffers returned to HAL. Continue flush");
2535        pthread_cond_signal(&mBuffersCond);
2536    }
2537}
2538
2539
2540/*===========================================================================
2541 * FUNCTION   : handlePendingReprocResults
2542 *
2543 * DESCRIPTION: check and notify on any pending reprocess results
2544 *
2545 * PARAMETERS :
2546 *   @frame_number   : Pending request frame number
2547 *
2548 * RETURN     : int32_t type of status
2549 *              NO_ERROR  -- success
2550 *              none-zero failure code
2551 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a stored reprocess result whose frame number matches.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until now.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching entry in the pending request list so the
            // final capture result can be assembled and the request retired.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build the capture result from the stored output buffer
                    // (held in the reprocess entry) plus the request's own
                    // input buffer and settings.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Retire the pending request; iterator k is invalidated,
                    // so stop the inner iteration here.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Drop the consumed reprocess entry; iterator j is invalidated,
            // so stop the outer iteration as well.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2590
2591/*===========================================================================
2592 * FUNCTION   : handleBatchMetadata
2593 *
2594 * DESCRIPTION: Handles metadata buffer callback in batch mode
2595 *
2596 * PARAMETERS : @metadata_buf: metadata buffer
2597 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2598 *                 the meta buf in this method
2599 *
2600 * RETURN     :
2601 *
2602 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the (last-frame) frame numbers and timestamp out of the batch
    // metadata; any missing pointer marks the whole buffer invalid.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    // mPendingBatchMap maps the batch's last frame number to its first;
    // the difference gives the number of frames to interpolate.
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is fully accounted for once its regular frame number
        // arrives, so the map entry can be removed here.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Iterate over the larger of the two spans; diffs beyond the HFR
        // batch size indicate dropped/missed batches and are only logged.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    // Re-play the single batch metadata once per interpolated frame,
    // patching frame number / urgent frame number / timestamp in place
    // before each handleMetadataWithLock call.
    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: space the batch's frames evenly at the HFR
                //video frame rate, ending at the reported last-frame time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        // Always pass false here: the shared metadata buffer is re-used for
        // every iteration and released once, below.
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2751
2752/*===========================================================================
2753 * FUNCTION   : handleMetadataWithLock
2754 *
2755 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2756 *
2757 * PARAMETERS : @metadata_buf: metadata buffer
2758 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2759 *                 the meta buf in this method
2760 *
2761 * RETURN     :
2762 *
2763 *==========================================================================*/
2764void QCamera3HardwareInterface::handleMetadataWithLock(
2765    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2766{
2767    ATRACE_CALL();
2768    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2769        //during flush do not send metadata from this thread
2770        LOGD("not sending metadata during flush or when mState is error");
2771        if (free_and_bufdone_meta_buf) {
2772            mMetadataChannel->bufDone(metadata_buf);
2773            free(metadata_buf);
2774        }
2775        return;
2776    }
2777
2778    //not in flush
2779    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2780    int32_t frame_number_valid, urgent_frame_number_valid;
2781    uint32_t frame_number, urgent_frame_number;
2782    int64_t capture_time;
2783    nsecs_t currentSysTime;
2784
2785    int32_t *p_frame_number_valid =
2786            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2787    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2788    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2789    int32_t *p_urgent_frame_number_valid =
2790            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2791    uint32_t *p_urgent_frame_number =
2792            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2793    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2794            metadata) {
2795        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2796                 *p_frame_number_valid, *p_frame_number);
2797    }
2798
2799    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2800            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2801        LOGE("Invalid metadata");
2802        if (free_and_bufdone_meta_buf) {
2803            mMetadataChannel->bufDone(metadata_buf);
2804            free(metadata_buf);
2805        }
2806        goto done_metadata;
2807    }
2808    frame_number_valid =        *p_frame_number_valid;
2809    frame_number =              *p_frame_number;
2810    capture_time =              *p_capture_time;
2811    urgent_frame_number_valid = *p_urgent_frame_number_valid;
2812    urgent_frame_number =       *p_urgent_frame_number;
2813    currentSysTime =            systemTime(CLOCK_MONOTONIC);
2814
2815    // Detect if buffers from any requests are overdue
2816    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2817        if ( (currentSysTime - req.timestamp) >
2818            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2819            for (auto &missed : req.mPendingBufferList) {
2820                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
2821                    "stream type = %d, stream format = %d",
2822                    frame_number, req.frame_number, missed.buffer,
2823                    missed.stream->stream_type, missed.stream->format);
2824            }
2825        }
2826    }
2827    //Partial result on process_capture_result for timestamp
2828    if (urgent_frame_number_valid) {
2829        LOGD("valid urgent frame_number = %u, capture_time = %lld",
2830           urgent_frame_number, capture_time);
2831
2832        //Recieved an urgent Frame Number, handle it
2833        //using partial results
2834        for (pendingRequestIterator i =
2835                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2836            LOGD("Iterator Frame = %d urgent frame = %d",
2837                 i->frame_number, urgent_frame_number);
2838
2839            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2840                (i->partial_result_cnt == 0)) {
2841                LOGE("Error: HAL missed urgent metadata for frame number %d",
2842                         i->frame_number);
2843            }
2844
2845            if (i->frame_number == urgent_frame_number &&
2846                     i->bUrgentReceived == 0) {
2847
2848                camera3_capture_result_t result;
2849                memset(&result, 0, sizeof(camera3_capture_result_t));
2850
2851                i->partial_result_cnt++;
2852                i->bUrgentReceived = 1;
2853                // Extract 3A metadata
2854                result.result =
2855                    translateCbUrgentMetadataToResultMetadata(metadata);
2856                // Populate metadata result
2857                result.frame_number = urgent_frame_number;
2858                result.num_output_buffers = 0;
2859                result.output_buffers = NULL;
2860                result.partial_result = i->partial_result_cnt;
2861
2862                mCallbackOps->process_capture_result(mCallbackOps, &result);
2863                LOGD("urgent frame_number = %u, capture_time = %lld",
2864                      result.frame_number, capture_time);
2865                free_camera_metadata((camera_metadata_t *)result.result);
2866                break;
2867            }
2868        }
2869    }
2870
2871    if (!frame_number_valid) {
2872        LOGD("Not a valid normal frame number, used as SOF only");
2873        if (free_and_bufdone_meta_buf) {
2874            mMetadataChannel->bufDone(metadata_buf);
2875            free(metadata_buf);
2876        }
2877        goto done_metadata;
2878    }
2879    LOGH("valid frame_number = %u, capture_time = %lld",
2880            frame_number, capture_time);
2881
2882    for (pendingRequestIterator i = mPendingRequestsList.begin();
2883            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2884        // Flush out all entries with less or equal frame numbers.
2885
2886        camera3_capture_result_t result;
2887        memset(&result, 0, sizeof(camera3_capture_result_t));
2888
2889        LOGD("frame_number in the list is %u", i->frame_number);
2890        i->partial_result_cnt++;
2891        result.partial_result = i->partial_result_cnt;
2892
2893        // Check whether any stream buffer corresponding to this is dropped or not
2894        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2895        // The API does not expect a blob buffer to be dropped
2896        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2897            /* Clear notify_msg structure */
2898            camera3_notify_msg_t notify_msg;
2899            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2900            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2901                    j != i->buffers.end(); j++) {
2902                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2903                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2904                for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2905                    if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2906                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2907                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2908                                __func__, i->frame_number, streamID, j->stream->format);
2909                        notify_msg.type = CAMERA3_MSG_ERROR;
2910                        notify_msg.message.error.frame_number = i->frame_number;
2911                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2912                        notify_msg.message.error.error_stream = j->stream;
2913                        mCallbackOps->notify(mCallbackOps, &notify_msg);
2914                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2915                                __func__, i->frame_number, streamID, j->stream->format);
2916                        PendingFrameDropInfo PendingFrameDrop;
2917                        PendingFrameDrop.frame_number=i->frame_number;
2918                        PendingFrameDrop.stream_ID = streamID;
2919                        // Add the Frame drop info to mPendingFrameDropList
2920                        mPendingFrameDropList.push_back(PendingFrameDrop);
2921                   }
2922               }
2923            }
2924        }
2925
2926        // Send empty metadata with already filled buffers for dropped metadata
2927        // and send valid metadata with already filled buffers for current metadata
2928        /* we could hit this case when we either
2929         * 1. have a pending reprocess request or
2930         * 2. miss a metadata buffer callback */
2931        if (i->frame_number < frame_number) {
2932            if (i->input_buffer) {
2933                /* this will be handled in handleInputBufferWithLock */
2934                i++;
2935                continue;
2936            } else {
2937                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
2938                if (free_and_bufdone_meta_buf) {
2939                    mMetadataChannel->bufDone(metadata_buf);
2940                    free(metadata_buf);
2941                }
2942                mState = ERROR;
2943                goto done_metadata;
2944            }
2945        } else {
2946            mPendingLiveRequest--;
2947            /* Clear notify_msg structure */
2948            camera3_notify_msg_t notify_msg;
2949            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2950
2951            // Send shutter notify to frameworks
2952            notify_msg.type = CAMERA3_MSG_SHUTTER;
2953            notify_msg.message.shutter.frame_number = i->frame_number;
2954            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2955            mCallbackOps->notify(mCallbackOps, &notify_msg);
2956
2957            i->timestamp = capture_time;
2958
2959            // Find channel requiring metadata, meaning internal offline postprocess
2960            // is needed.
2961            //TODO: for now, we don't support two streams requiring metadata at the same time.
2962            // (because we are not making copies, and metadata buffer is not reference counted.
2963            bool internalPproc = false;
2964            for (pendingBufferIterator iter = i->buffers.begin();
2965                    iter != i->buffers.end(); iter++) {
2966                if (iter->need_metadata) {
2967                    internalPproc = true;
2968                    QCamera3ProcessingChannel *channel =
2969                            (QCamera3ProcessingChannel *)iter->stream->priv;
2970                    channel->queueReprocMetadata(metadata_buf);
2971                    break;
2972                }
2973            }
2974
2975            result.result = translateFromHalMetadata(metadata,
2976                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2977                    i->capture_intent, i->hybrid_ae_enable,
2978                     /* DevCamDebug metadata translateFromHalMetadata function call*/
2979                    i->DevCamDebug_meta_enable,
2980                    /* DevCamDebug metadata end */
2981                    internalPproc, i->fwkCacMode);
2982
2983            saveExifParams(metadata);
2984
2985            if (i->blob_request) {
2986                {
2987                    //Dump tuning metadata if enabled and available
2988                    char prop[PROPERTY_VALUE_MAX];
2989                    memset(prop, 0, sizeof(prop));
2990                    property_get("persist.camera.dumpmetadata", prop, "0");
2991                    int32_t enabled = atoi(prop);
2992                    if (enabled && metadata->is_tuning_params_valid) {
2993                        dumpMetadataToFile(metadata->tuning_params,
2994                               mMetaFrameCount,
2995                               enabled,
2996                               "Snapshot",
2997                               frame_number);
2998                    }
2999                }
3000            }
3001
3002            if (!internalPproc) {
3003                LOGD("couldn't find need_metadata for this metadata");
3004                // Return metadata buffer
3005                if (free_and_bufdone_meta_buf) {
3006                    mMetadataChannel->bufDone(metadata_buf);
3007                    free(metadata_buf);
3008                }
3009            }
3010        }
3011        if (!result.result) {
3012            LOGE("metadata is NULL");
3013        }
3014        result.frame_number = i->frame_number;
3015        result.input_buffer = i->input_buffer;
3016        result.num_output_buffers = 0;
3017        result.output_buffers = NULL;
3018        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3019                    j != i->buffers.end(); j++) {
3020            if (j->buffer) {
3021                result.num_output_buffers++;
3022            }
3023        }
3024
3025        updateFpsInPreviewBuffer(metadata, i->frame_number);
3026
3027        if (result.num_output_buffers > 0) {
3028            camera3_stream_buffer_t *result_buffers =
3029                new camera3_stream_buffer_t[result.num_output_buffers];
3030            if (result_buffers != NULL) {
3031                size_t result_buffers_idx = 0;
3032                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3033                        j != i->buffers.end(); j++) {
3034                    if (j->buffer) {
3035                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3036                                m != mPendingFrameDropList.end(); m++) {
3037                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3038                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3039                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3040                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3041                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3042                                        frame_number, streamID);
3043                                m = mPendingFrameDropList.erase(m);
3044                                break;
3045                            }
3046                        }
3047                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
3048                        result_buffers[result_buffers_idx++] = *(j->buffer);
3049                        free(j->buffer);
3050                        j->buffer = NULL;
3051                    }
3052                }
3053                result.output_buffers = result_buffers;
3054                mCallbackOps->process_capture_result(mCallbackOps, &result);
3055                LOGD("meta frame_number = %u, capture_time = %lld",
3056                        result.frame_number, i->timestamp);
3057                free_camera_metadata((camera_metadata_t *)result.result);
3058                delete[] result_buffers;
3059            }else {
3060                LOGE("Fatal error: out of memory");
3061            }
3062        } else {
3063            mCallbackOps->process_capture_result(mCallbackOps, &result);
3064            LOGD("meta frame_number = %u, capture_time = %lld",
3065                    result.frame_number, i->timestamp);
3066            free_camera_metadata((camera_metadata_t *)result.result);
3067        }
3068
3069        i = erasePendingRequest(i);
3070
3071        if (!mPendingReprocessResultList.empty()) {
3072            handlePendingReprocResults(frame_number + 1);
3073        }
3074    }
3075
3076done_metadata:
3077    for (pendingRequestIterator i = mPendingRequestsList.begin();
3078            i != mPendingRequestsList.end() ;i++) {
3079        i->pipeline_depth++;
3080    }
3081    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3082    unblockRequestIfNecessary();
3083}
3084
3085/*===========================================================================
3086 * FUNCTION   : hdrPlusPerfLock
3087 *
3088 * DESCRIPTION: perf lock for HDR+ using custom intent
3089 *
3090 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3091 *
3092 * RETURN     : None
3093 *
3094 *==========================================================================*/
3095void QCamera3HardwareInterface::hdrPlusPerfLock(
3096        mm_camera_super_buf_t *metadata_buf)
3097{
3098    if (NULL == metadata_buf) {
3099        LOGE("metadata_buf is NULL");
3100        return;
3101    }
3102    metadata_buffer_t *metadata =
3103            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3104    int32_t *p_frame_number_valid =
3105            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3106    uint32_t *p_frame_number =
3107            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3108
3109    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3110        LOGE("%s: Invalid metadata", __func__);
3111        return;
3112    }
3113
3114    //acquire perf lock for 5 sec after the last HDR frame is captured
3115    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3116        if ((p_frame_number != NULL) &&
3117                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3118            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3119        }
3120    }
3121
3122    //release lock after perf lock timer is expired. If lock is already released,
3123    //isTimerReset returns false
3124    if (m_perfLock.isTimerReset()) {
3125        mLastCustIntentFrmNum = -1;
3126        m_perfLock.lock_rel_timed();
3127    }
3128}
3129
3130/*===========================================================================
3131 * FUNCTION   : handleInputBufferWithLock
3132 *
3133 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3134 *
3135 * PARAMETERS : @frame_number: frame number of the input buffer
3136 *
3137 * RETURN     :
3138 *
3139 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request that matches this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Default to the current monotonic time; prefer the sensor
            // timestamp from the request settings when present.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            // Shutter notify must be delivered to the framework before the
            // corresponding capture result, and only once per request.
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (and close) the input buffer's release fence before
        // returning it to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Send the final result: full metadata (PARTIAL_RESULT_COUNT), the
        // input buffer, and no output buffers.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Request is fully serviced; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3197
3198/*===========================================================================
3199 * FUNCTION   : handleBufferWithLock
3200 *
3201 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3202 *
3203 * PARAMETERS : @buffer: image buffer for the callback
3204 *              @frame_number: frame number of the image buffer
3205 *
3206 * RETURN     :
3207 *
3208 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        // Metadata for this frame was already sent; return a buffer-only
        // result (no metadata, partial_result = 0).
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped earlier, mark
        // the buffer with STATUS_ERROR and remove the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build shutter notify from the settings'
            // sensor timestamp (fall back to current monotonic time).
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait for and close the input buffer's release fence before
            // handing results back to the framework.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            mPendingBuffersMap.removeBuf(buffer->buffer);

            // Results must be returned in frame-number order: only notify now
            // if no older request is still pending.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("Notify reprocess now %d!", frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (flushed out by handlePendingReprocResults once older
                // frames complete — see handleMetadataWithLock).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                LOGD("Cache reprocess result %d!", frame_number);
            }
        } else {
            // Normal request still pending: cache a copy of the buffer on the
            // matching stream entry until the metadata callback sends the
            // combined result.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        // NOTE(review): malloc result is dereferenced without
                        // a NULL check — would crash on allocation failure.
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }
}
3344
3345/*===========================================================================
3346 * FUNCTION   : unblockRequestIfNecessary
3347 *
3348 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3349 *              that mMutex is held when this function is called.
3350 *
3351 * PARAMETERS :
3352 *
3353 * RETURN     :
3354 *
3355 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // signal (not broadcast): wakes a single waiter on mRequestCond.
   // Per the header comment, mMutex is held by the caller.
   pthread_cond_signal(&mRequestCond);
}
3361
3362
3363/*===========================================================================
3364 * FUNCTION   : processCaptureRequest
3365 *
3366 * DESCRIPTION: process a capture request from camera service
3367 *
3368 * PARAMETERS :
3369 *   @request : request from framework to process
3370 *
3371 * RETURN     :
3372 *
3373 *==========================================================================*/
3374int QCamera3HardwareInterface::processCaptureRequest(
3375                    camera3_capture_request_t *request)
3376{
3377    ATRACE_CALL();
3378    int rc = NO_ERROR;
3379    int32_t request_id;
3380    CameraMetadata meta;
3381    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3382    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3383    bool isVidBufRequested = false;
3384    camera3_stream_buffer_t *pInputBuffer = NULL;
3385
3386    pthread_mutex_lock(&mMutex);
3387
3388    // Validate current state
3389    switch (mState) {
3390        case CONFIGURED:
3391        case STARTED:
3392            /* valid state */
3393            break;
3394
3395        case ERROR:
3396            pthread_mutex_unlock(&mMutex);
3397            handleCameraDeviceError();
3398            return -ENODEV;
3399
3400        default:
3401            LOGE("Invalid state %d", mState);
3402            pthread_mutex_unlock(&mMutex);
3403            return -ENODEV;
3404    }
3405
3406    rc = validateCaptureRequest(request);
3407    if (rc != NO_ERROR) {
3408        LOGE("incoming request is not valid");
3409        pthread_mutex_unlock(&mMutex);
3410        return rc;
3411    }
3412
3413    meta = request->settings;
3414
3415    // For first capture request, send capture intent, and
3416    // stream on all streams
3417    if (mState == CONFIGURED) {
3418        // send an unconfigure to the backend so that the isp
3419        // resources are deallocated
3420        if (!mFirstConfiguration) {
3421            cam_stream_size_info_t stream_config_info;
3422            int32_t hal_version = CAM_HAL_V3;
3423            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3424            stream_config_info.buffer_info.min_buffers =
3425                    MIN_INFLIGHT_REQUESTS;
3426            stream_config_info.buffer_info.max_buffers =
3427                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3428            clear_metadata_buffer(mParameters);
3429            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3430                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3431            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3432                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3433            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3434                    mParameters);
3435            if (rc < 0) {
3436                LOGE("set_parms for unconfigure failed");
3437                pthread_mutex_unlock(&mMutex);
3438                return rc;
3439            }
3440        }
3441        m_perfLock.lock_acq();
3442        /* get eis information for stream configuration */
3443        cam_is_type_t is_type;
3444        char is_type_value[PROPERTY_VALUE_MAX];
3445        property_get("persist.camera.is_type", is_type_value, "0");
3446        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3447
3448        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3449            int32_t hal_version = CAM_HAL_V3;
3450            uint8_t captureIntent =
3451                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3452            mCaptureIntent = captureIntent;
3453            clear_metadata_buffer(mParameters);
3454            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3455            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3456        }
3457
3458        //If EIS is enabled, turn it on for video
3459        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3460        int32_t vsMode;
3461        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3462        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3463            rc = BAD_VALUE;
3464        }
3465
3466        //IS type will be 0 unless EIS is supported. If EIS is supported
3467        //it could either be 1 or 4 depending on the stream and video size
3468        if (setEis) {
3469            if (!m_bEisSupportedSize) {
3470                is_type = IS_TYPE_DIS;
3471            } else {
3472                is_type = IS_TYPE_EIS_2_0;
3473            }
3474            mStreamConfigInfo.is_type = is_type;
3475        } else {
3476            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3477        }
3478
3479        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3480                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3481        int32_t tintless_value = 1;
3482        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3483                CAM_INTF_PARM_TINTLESS, tintless_value);
3484        //Disable CDS for HFR mode or if DIS/EIS is on.
3485        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3486        //after every configure_stream
3487        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3488                (m_bIsVideo)) {
3489            int32_t cds = CAM_CDS_MODE_OFF;
3490            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3491                    CAM_INTF_PARM_CDS_MODE, cds))
3492                LOGE("Failed to disable CDS for HFR mode");
3493
3494        }
3495        setMobicat();
3496
3497        /* Set fps and hfr mode while sending meta stream info so that sensor
3498         * can configure appropriate streaming mode */
3499        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3500        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3501            rc = setHalFpsRange(meta, mParameters);
3502            if (rc != NO_ERROR) {
3503                LOGE("setHalFpsRange failed");
3504            }
3505        }
3506        if (meta.exists(ANDROID_CONTROL_MODE)) {
3507            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3508            rc = extractSceneMode(meta, metaMode, mParameters);
3509            if (rc != NO_ERROR) {
3510                LOGE("extractSceneMode failed");
3511            }
3512        }
3513
3514        //TODO: validate the arguments, HSV scenemode should have only the
3515        //advertised fps ranges
3516
3517        /*set the capture intent, hal version, tintless, stream info,
3518         *and disenable parameters to the backend*/
3519        LOGD("set_parms META_STREAM_INFO " );
3520        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3521            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3522                    "Format:%d",
3523                    mStreamConfigInfo.type[i],
3524                    mStreamConfigInfo.stream_sizes[i].width,
3525                    mStreamConfigInfo.stream_sizes[i].height,
3526                    mStreamConfigInfo.postprocess_mask[i],
3527                    mStreamConfigInfo.format[i]);
3528        }
3529        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3530                    mParameters);
3531        if (rc < 0) {
3532            LOGE("set_parms failed for hal version, stream info");
3533        }
3534
3535        cam_dimension_t sensor_dim;
3536        memset(&sensor_dim, 0, sizeof(sensor_dim));
3537        rc = getSensorOutputSize(sensor_dim);
3538        if (rc != NO_ERROR) {
3539            LOGE("Failed to get sensor output size");
3540            pthread_mutex_unlock(&mMutex);
3541            goto error_exit;
3542        }
3543
3544        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3545                gCamCapability[mCameraId]->active_array_size.height,
3546                sensor_dim.width, sensor_dim.height);
3547
3548        /* Set batchmode before initializing channel. Since registerBuffer
3549         * internally initializes some of the channels, better set batchmode
3550         * even before first register buffer */
3551        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3552            it != mStreamInfo.end(); it++) {
3553            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3554            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3555                    && mBatchSize) {
3556                rc = channel->setBatchSize(mBatchSize);
3557                //Disable per frame map unmap for HFR/batchmode case
3558                rc |= channel->setPerFrameMapUnmap(false);
3559                if (NO_ERROR != rc) {
3560                    LOGE("Channel init failed %d", rc);
3561                    pthread_mutex_unlock(&mMutex);
3562                    goto error_exit;
3563                }
3564            }
3565        }
3566
3567        //First initialize all streams
3568        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3569            it != mStreamInfo.end(); it++) {
3570            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3571            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3572               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3573               setEis)
3574                rc = channel->initialize(is_type);
3575            else {
3576                rc = channel->initialize(IS_TYPE_NONE);
3577            }
3578            if (NO_ERROR != rc) {
3579                LOGE("Channel initialization failed %d", rc);
3580                pthread_mutex_unlock(&mMutex);
3581                goto error_exit;
3582            }
3583        }
3584
3585        if (mRawDumpChannel) {
3586            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3587            if (rc != NO_ERROR) {
3588                LOGE("Error: Raw Dump Channel init failed");
3589                pthread_mutex_unlock(&mMutex);
3590                goto error_exit;
3591            }
3592        }
3593        if (mSupportChannel) {
3594            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3595            if (rc < 0) {
3596                LOGE("Support channel initialization failed");
3597                pthread_mutex_unlock(&mMutex);
3598                goto error_exit;
3599            }
3600        }
3601        if (mAnalysisChannel) {
3602            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3603            if (rc < 0) {
3604                LOGE("Analysis channel initialization failed");
3605                pthread_mutex_unlock(&mMutex);
3606                goto error_exit;
3607            }
3608        }
3609        if (mDummyBatchChannel) {
3610            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3611            if (rc < 0) {
3612                LOGE("mDummyBatchChannel setBatchSize failed");
3613                pthread_mutex_unlock(&mMutex);
3614                goto error_exit;
3615            }
3616            rc = mDummyBatchChannel->initialize(is_type);
3617            if (rc < 0) {
3618                LOGE("mDummyBatchChannel initialization failed");
3619                pthread_mutex_unlock(&mMutex);
3620                goto error_exit;
3621            }
3622        }
3623
3624        // Set bundle info
3625        rc = setBundleInfo();
3626        if (rc < 0) {
3627            LOGE("setBundleInfo failed %d", rc);
3628            pthread_mutex_unlock(&mMutex);
3629            goto error_exit;
3630        }
3631
3632        //update settings from app here
3633        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3634            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3635            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3636        }
3637        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3638            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3639            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3640        }
3641        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3642            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3643            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3644
3645            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3646                (mLinkedCameraId != mCameraId) ) {
3647                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3648                    mLinkedCameraId, mCameraId);
3649                goto error_exit;
3650            }
3651        }
3652
3653        // add bundle related cameras
3654        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3655        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3656            if (mIsDeviceLinked)
3657                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3658            else
3659                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3660
3661            pthread_mutex_lock(&gCamLock);
3662
3663            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3664                LOGE("Dualcam: Invalid Session Id ");
3665                pthread_mutex_unlock(&gCamLock);
3666                goto error_exit;
3667            }
3668
3669            if (mIsMainCamera == 1) {
3670                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3671                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3672                // related session id should be session id of linked session
3673                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3674            } else {
3675                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3676                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3677                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3678            }
3679            pthread_mutex_unlock(&gCamLock);
3680
3681            rc = mCameraHandle->ops->sync_related_sensors(
3682                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3683            if (rc < 0) {
3684                LOGE("Dualcam: link failed");
3685                goto error_exit;
3686            }
3687        }
3688
3689        //Then start them.
3690        LOGH("Start META Channel");
3691        rc = mMetadataChannel->start();
3692        if (rc < 0) {
3693            LOGE("META channel start failed");
3694            pthread_mutex_unlock(&mMutex);
3695            goto error_exit;
3696        }
3697
3698        if (mAnalysisChannel) {
3699            rc = mAnalysisChannel->start();
3700            if (rc < 0) {
3701                LOGE("Analysis channel start failed");
3702                mMetadataChannel->stop();
3703                pthread_mutex_unlock(&mMutex);
3704                goto error_exit;
3705            }
3706        }
3707
3708        if (mSupportChannel) {
3709            rc = mSupportChannel->start();
3710            if (rc < 0) {
3711                LOGE("Support channel start failed");
3712                mMetadataChannel->stop();
3713                /* Although support and analysis are mutually exclusive today
3714                   adding it in anycase for future proofing */
3715                if (mAnalysisChannel) {
3716                    mAnalysisChannel->stop();
3717                }
3718                pthread_mutex_unlock(&mMutex);
3719                goto error_exit;
3720            }
3721        }
3722        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3723            it != mStreamInfo.end(); it++) {
3724            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3725            LOGH("Start Processing Channel mask=%d",
3726                     channel->getStreamTypeMask());
3727            rc = channel->start();
3728            if (rc < 0) {
3729                LOGE("channel start failed");
3730                pthread_mutex_unlock(&mMutex);
3731                goto error_exit;
3732            }
3733        }
3734
3735        if (mRawDumpChannel) {
3736            LOGD("Starting raw dump stream");
3737            rc = mRawDumpChannel->start();
3738            if (rc != NO_ERROR) {
3739                LOGE("Error Starting Raw Dump Channel");
3740                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3741                      it != mStreamInfo.end(); it++) {
3742                    QCamera3Channel *channel =
3743                        (QCamera3Channel *)(*it)->stream->priv;
3744                    LOGH("Stopping Processing Channel mask=%d",
3745                        channel->getStreamTypeMask());
3746                    channel->stop();
3747                }
3748                if (mSupportChannel)
3749                    mSupportChannel->stop();
3750                if (mAnalysisChannel) {
3751                    mAnalysisChannel->stop();
3752                }
3753                mMetadataChannel->stop();
3754                pthread_mutex_unlock(&mMutex);
3755                goto error_exit;
3756            }
3757        }
3758
3759        if (mChannelHandle) {
3760
3761            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3762                    mChannelHandle);
3763            if (rc != NO_ERROR) {
3764                LOGE("start_channel failed %d", rc);
3765                pthread_mutex_unlock(&mMutex);
3766                goto error_exit;
3767            }
3768        }
3769
3770        goto no_error;
3771error_exit:
3772        m_perfLock.lock_rel();
3773        return rc;
3774no_error:
3775        m_perfLock.lock_rel();
3776
3777        mWokenUpByDaemon = false;
3778        mPendingLiveRequest = 0;
3779        mFirstConfiguration = false;
3780        enablePowerHint();
3781    }
3782
3783    uint32_t frameNumber = request->frame_number;
3784    cam_stream_ID_t streamID;
3785
3786    if (mFlushPerf) {
3787        //we cannot accept any requests during flush
3788        LOGE("process_capture_request cannot proceed during flush");
3789        pthread_mutex_unlock(&mMutex);
3790        return NO_ERROR; //should return an error
3791    }
3792
3793    if (meta.exists(ANDROID_REQUEST_ID)) {
3794        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3795        mCurrentRequestId = request_id;
3796        LOGD("Received request with id: %d", request_id);
3797    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3798        LOGE("Unable to find request id field, \
3799                & no previous id available");
3800        pthread_mutex_unlock(&mMutex);
3801        return NAME_NOT_FOUND;
3802    } else {
3803        LOGD("Re-using old request id");
3804        request_id = mCurrentRequestId;
3805    }
3806
3807    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3808                                    request->num_output_buffers,
3809                                    request->input_buffer,
3810                                    frameNumber);
3811    // Acquire all request buffers first
3812    streamID.num_streams = 0;
3813    int blob_request = 0;
3814    uint32_t snapshotStreamId = 0;
3815    for (size_t i = 0; i < request->num_output_buffers; i++) {
3816        const camera3_stream_buffer_t& output = request->output_buffers[i];
3817        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3818
3819        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3820            //Call function to store local copy of jpeg data for encode params.
3821            blob_request = 1;
3822            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3823        }
3824
3825        if (output.acquire_fence != -1) {
3826           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3827           close(output.acquire_fence);
3828           if (rc != OK) {
3829              LOGE("sync wait failed %d", rc);
3830              pthread_mutex_unlock(&mMutex);
3831              return rc;
3832           }
3833        }
3834
3835        streamID.streamID[streamID.num_streams] =
3836            channel->getStreamID(channel->getStreamTypeMask());
3837        streamID.num_streams++;
3838
3839        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3840            isVidBufRequested = true;
3841        }
3842    }
3843
3844    if (blob_request) {
3845        KPI_ATRACE_INT("SNAPSHOT", 1);
3846    }
3847    if (blob_request && mRawDumpChannel) {
3848        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3849        streamID.streamID[streamID.num_streams] =
3850            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3851        streamID.num_streams++;
3852    }
3853
3854    if(request->input_buffer == NULL) {
3855        /* Parse the settings:
3856         * - For every request in NORMAL MODE
3857         * - For every request in HFR mode during preview only case
3858         * - For first request of every batch in HFR mode during video
3859         * recording. In batchmode the same settings except frame number is
3860         * repeated in each request of the batch.
3861         */
3862        if (!mBatchSize ||
3863           (mBatchSize && !isVidBufRequested) ||
3864           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3865            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3866            if (rc < 0) {
3867                LOGE("fail to set frame parameters");
3868                pthread_mutex_unlock(&mMutex);
3869                return rc;
3870            }
3871        }
3872        /* For batchMode HFR, setFrameParameters is not called for every
3873         * request. But only frame number of the latest request is parsed.
3874         * Keep track of first and last frame numbers in a batch so that
3875         * metadata for the frame numbers of batch can be duplicated in
3876         * handleBatchMetadta */
3877        if (mBatchSize) {
3878            if (!mToBeQueuedVidBufs) {
3879                //start of the batch
3880                mFirstFrameNumberInBatch = request->frame_number;
3881            }
3882            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3883                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3884                LOGE("Failed to set the frame number in the parameters");
3885                return BAD_VALUE;
3886            }
3887        }
3888        if (mNeedSensorRestart) {
3889            /* Unlock the mutex as restartSensor waits on the channels to be
3890             * stopped, which in turn calls stream callback functions -
3891             * handleBufferWithLock and handleMetadataWithLock */
3892            pthread_mutex_unlock(&mMutex);
3893            rc = dynamicUpdateMetaStreamInfo();
3894            if (rc != NO_ERROR) {
3895                LOGE("Restarting the sensor failed");
3896                return BAD_VALUE;
3897            }
3898            mNeedSensorRestart = false;
3899            pthread_mutex_lock(&mMutex);
3900        }
3901    } else {
3902
3903        if (request->input_buffer->acquire_fence != -1) {
3904           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3905           close(request->input_buffer->acquire_fence);
3906           if (rc != OK) {
3907              LOGE("input buffer sync wait failed %d", rc);
3908              pthread_mutex_unlock(&mMutex);
3909              return rc;
3910           }
3911        }
3912    }
3913
3914    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3915        mLastCustIntentFrmNum = frameNumber;
3916    }
3917    /* Update pending request list and pending buffers map */
3918    PendingRequestInfo pendingRequest;
3919    pendingRequestIterator latestRequest;
3920    pendingRequest.frame_number = frameNumber;
3921    pendingRequest.num_buffers = request->num_output_buffers;
3922    pendingRequest.request_id = request_id;
3923    pendingRequest.blob_request = blob_request;
3924    pendingRequest.timestamp = 0;
3925    pendingRequest.bUrgentReceived = 0;
3926    if (request->input_buffer) {
3927        pendingRequest.input_buffer =
3928                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3929        *(pendingRequest.input_buffer) = *(request->input_buffer);
3930        pInputBuffer = pendingRequest.input_buffer;
3931    } else {
3932       pendingRequest.input_buffer = NULL;
3933       pInputBuffer = NULL;
3934    }
3935
3936    pendingRequest.pipeline_depth = 0;
3937    pendingRequest.partial_result_cnt = 0;
3938    extractJpegMetadata(mCurJpegMeta, request);
3939    pendingRequest.jpegMetadata = mCurJpegMeta;
3940    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3941    pendingRequest.shutter_notified = false;
3942
3943    //extract capture intent
3944    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3945        mCaptureIntent =
3946                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3947    }
3948    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3949        mHybridAeEnable =
3950                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3951    }
3952    pendingRequest.capture_intent = mCaptureIntent;
3953    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3954    /* DevCamDebug metadata processCaptureRequest */
3955    if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
3956        mDevCamDebugMetaEnable =
3957                meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
3958    }
3959    /* DevCamDebug metadata end */
3960
3961    //extract CAC info
3962    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
3963        mCacMode =
3964                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
3965    }
3966    pendingRequest.fwkCacMode = mCacMode;
3967
3968    PendingBuffersInRequest bufsForCurRequest;
3969    bufsForCurRequest.frame_number = frameNumber;
3970    // Mark current timestamp for the new request
3971    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
3972
3973    for (size_t i = 0; i < request->num_output_buffers; i++) {
3974        RequestedBufferInfo requestedBuf;
3975        memset(&requestedBuf, 0, sizeof(requestedBuf));
3976        requestedBuf.stream = request->output_buffers[i].stream;
3977        requestedBuf.buffer = NULL;
3978        pendingRequest.buffers.push_back(requestedBuf);
3979
3980        // Add to buffer handle the pending buffers list
3981        PendingBufferInfo bufferInfo;
3982        bufferInfo.buffer = request->output_buffers[i].buffer;
3983        bufferInfo.stream = request->output_buffers[i].stream;
3984        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
3985        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3986        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3987            frameNumber, bufferInfo.buffer,
3988            channel->getStreamTypeMask(), bufferInfo.stream->format);
3989    }
3990    // Add this request packet into mPendingBuffersMap
3991    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
3992    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
3993        mPendingBuffersMap.get_num_overall_buffers());
3994
3995    latestRequest = mPendingRequestsList.insert(
3996            mPendingRequestsList.end(), pendingRequest);
3997    if(mFlush) {
3998        pthread_mutex_unlock(&mMutex);
3999        return NO_ERROR;
4000    }
4001
4002    // Notify metadata channel we receive a request
4003    mMetadataChannel->request(NULL, frameNumber);
4004
4005    if(request->input_buffer != NULL){
4006        LOGD("Input request, frame_number %d", frameNumber);
4007        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4008        if (NO_ERROR != rc) {
4009            LOGE("fail to set reproc parameters");
4010            pthread_mutex_unlock(&mMutex);
4011            return rc;
4012        }
4013    }
4014
4015    // Call request on other streams
4016    uint32_t streams_need_metadata = 0;
4017    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4018    for (size_t i = 0; i < request->num_output_buffers; i++) {
4019        const camera3_stream_buffer_t& output = request->output_buffers[i];
4020        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4021
4022        if (channel == NULL) {
4023            LOGW("invalid channel pointer for stream");
4024            continue;
4025        }
4026
4027        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4028            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4029                      output.buffer, request->input_buffer, frameNumber);
4030            if(request->input_buffer != NULL){
4031                rc = channel->request(output.buffer, frameNumber,
4032                        pInputBuffer, &mReprocMeta);
4033                if (rc < 0) {
4034                    LOGE("Fail to request on picture channel");
4035                    pthread_mutex_unlock(&mMutex);
4036                    return rc;
4037                }
4038            } else {
4039                LOGD("snapshot request with buffer %p, frame_number %d",
4040                         output.buffer, frameNumber);
4041                if (!request->settings) {
4042                    rc = channel->request(output.buffer, frameNumber,
4043                            NULL, mPrevParameters);
4044                } else {
4045                    rc = channel->request(output.buffer, frameNumber,
4046                            NULL, mParameters);
4047                }
4048                if (rc < 0) {
4049                    LOGE("Fail to request on picture channel");
4050                    pthread_mutex_unlock(&mMutex);
4051                    return rc;
4052                }
4053                pendingBufferIter->need_metadata = true;
4054                streams_need_metadata++;
4055            }
4056        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4057            bool needMetadata = false;
4058
4059            if (m_perfLock.isPerfLockTimedAcquired()) {
4060                if (m_perfLock.isTimerReset())
4061                {
4062                    m_perfLock.lock_rel_timed();
4063                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4064                }
4065            } else {
4066                m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4067            }
4068
4069            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4070            rc = yuvChannel->request(output.buffer, frameNumber,
4071                    pInputBuffer,
4072                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
4073            if (rc < 0) {
4074                LOGE("Fail to request on YUV channel");
4075                pthread_mutex_unlock(&mMutex);
4076                return rc;
4077            }
4078            pendingBufferIter->need_metadata = needMetadata;
4079            if (needMetadata)
4080                streams_need_metadata += 1;
4081            LOGD("calling YUV channel request, need_metadata is %d",
4082                     needMetadata);
4083        } else {
4084            LOGD("request with buffer %p, frame_number %d",
4085                  output.buffer, frameNumber);
4086            /* Set perf lock for API-2 zsl */
4087            if (IS_USAGE_ZSL(output.stream->usage)) {
4088                if (m_perfLock.isPerfLockTimedAcquired()) {
4089                    if (m_perfLock.isTimerReset())
4090                    {
4091                        m_perfLock.lock_rel_timed();
4092                        m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4093                    }
4094                } else {
4095                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4096                }
4097            }
4098
4099            rc = channel->request(output.buffer, frameNumber);
4100            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4101                    && mBatchSize) {
4102                mToBeQueuedVidBufs++;
4103                if (mToBeQueuedVidBufs == mBatchSize) {
4104                    channel->queueBatchBuf();
4105                }
4106            }
4107            if (rc < 0) {
4108                LOGE("request failed");
4109                pthread_mutex_unlock(&mMutex);
4110                return rc;
4111            }
4112        }
4113        pendingBufferIter++;
4114    }
4115
4116    //If 2 streams have need_metadata set to true, fail the request, unless
4117    //we copy/reference count the metadata buffer
4118    if (streams_need_metadata > 1) {
4119        LOGE("not supporting request in which two streams requires"
4120                " 2 HAL metadata for reprocessing");
4121        pthread_mutex_unlock(&mMutex);
4122        return -EINVAL;
4123    }
4124
4125    if(request->input_buffer == NULL) {
4126        /* Set the parameters to backend:
4127         * - For every request in NORMAL MODE
4128         * - For every request in HFR mode during preview only case
4129         * - Once every batch in HFR mode during video recording
4130         */
4131        if (!mBatchSize ||
4132           (mBatchSize && !isVidBufRequested) ||
4133           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4134            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4135                     mBatchSize, isVidBufRequested,
4136                    mToBeQueuedVidBufs);
4137            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4138                    mParameters);
4139            if (rc < 0) {
4140                LOGE("set_parms failed");
4141            }
4142            /* reset to zero coz, the batch is queued */
4143            mToBeQueuedVidBufs = 0;
4144            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4145        }
4146        mPendingLiveRequest++;
4147    }
4148
4149    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4150
4151    mState = STARTED;
4152    // Added a timed condition wait
4153    struct timespec ts;
4154    uint8_t isValidTimeout = 1;
4155    rc = clock_gettime(CLOCK_REALTIME, &ts);
4156    if (rc < 0) {
4157      isValidTimeout = 0;
4158      LOGE("Error reading the real time clock!!");
4159    }
4160    else {
4161      // Make timeout as 5 sec for request to be honored
4162      ts.tv_sec += 5;
4163    }
4164    //Block on conditional variable
4165    if (mBatchSize) {
4166        /* For HFR, more buffers are dequeued upfront to improve the performance */
4167        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4168        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4169    }
4170    if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
4171        m_perfLock.lock_rel_timed();
4172
4173    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
4174            (mState != ERROR) && (mState != DEINIT)) {
4175        if (!isValidTimeout) {
4176            LOGD("Blocking on conditional wait");
4177            pthread_cond_wait(&mRequestCond, &mMutex);
4178        }
4179        else {
4180            LOGD("Blocking on timed conditional wait");
4181            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4182            if (rc == ETIMEDOUT) {
4183                rc = -ENODEV;
4184                LOGE("Unblocked on timeout!!!!");
4185                break;
4186            }
4187        }
4188        LOGD("Unblocked");
4189        if (mWokenUpByDaemon) {
4190            mWokenUpByDaemon = false;
4191            if (mPendingLiveRequest < maxInFlightRequests)
4192                break;
4193        }
4194    }
4195    pthread_mutex_unlock(&mMutex);
4196
4197    return rc;
4198}
4199
4200/*===========================================================================
4201 * FUNCTION   : dump
4202 *
4203 * DESCRIPTION:
4204 *
4205 * PARAMETERS :
4206 *
4207 *
4208 * RETURN     :
4209 *==========================================================================*/
4210void QCamera3HardwareInterface::dump(int fd)
4211{
4212    pthread_mutex_lock(&mMutex);
4213    dprintf(fd, "\n Camera HAL3 information Begin \n");
4214
4215    dprintf(fd, "\nNumber of pending requests: %zu \n",
4216        mPendingRequestsList.size());
4217    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4218    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4219    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4220    for(pendingRequestIterator i = mPendingRequestsList.begin();
4221            i != mPendingRequestsList.end(); i++) {
4222        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4223        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4224        i->input_buffer);
4225    }
4226    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4227                mPendingBuffersMap.get_num_overall_buffers());
4228    dprintf(fd, "-------+------------------\n");
4229    dprintf(fd, " Frame | Stream type mask \n");
4230    dprintf(fd, "-------+------------------\n");
4231    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4232        for(auto &j : req.mPendingBufferList) {
4233            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4234            dprintf(fd, " %5d | %11d \n",
4235                    req.frame_number, channel->getStreamTypeMask());
4236        }
4237    }
4238    dprintf(fd, "-------+------------------\n");
4239
4240    dprintf(fd, "\nPending frame drop list: %zu\n",
4241        mPendingFrameDropList.size());
4242    dprintf(fd, "-------+-----------\n");
4243    dprintf(fd, " Frame | Stream ID \n");
4244    dprintf(fd, "-------+-----------\n");
4245    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4246        i != mPendingFrameDropList.end(); i++) {
4247        dprintf(fd, " %5d | %9d \n",
4248            i->frame_number, i->stream_ID);
4249    }
4250    dprintf(fd, "-------+-----------\n");
4251
4252    dprintf(fd, "\n Camera HAL3 information End \n");
4253
4254    /* use dumpsys media.camera as trigger to send update debug level event */
4255    mUpdateDebugLevel = true;
4256    pthread_mutex_unlock(&mMutex);
4257    return;
4258}
4259
4260/*===========================================================================
4261 * FUNCTION   : flush
4262 *
4263 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4264 *              conditionally restarts channels
4265 *
4266 * PARAMETERS :
4267 *  @ restartChannels: re-start all channels
4268 *
4269 *
4270 * RETURN     :
4271 *          0 on success
4272 *          Error code on failure
4273 *==========================================================================*/
4274int QCamera3HardwareInterface::flush(bool restartChannels)
4275{
4276    KPI_ATRACE_CALL();
4277    int32_t rc = NO_ERROR;
4278
4279    LOGD("Unblocking Process Capture Request");
4280    pthread_mutex_lock(&mMutex);
4281    mFlush = true;
4282    pthread_mutex_unlock(&mMutex);
4283
4284    rc = stopAllChannels();
4285    // unlink of dualcam
4286    if (mIsDeviceLinked) {
4287        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4288        pthread_mutex_lock(&gCamLock);
4289
4290        if (mIsMainCamera == 1) {
4291            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4292            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4293            // related session id should be session id of linked session
4294            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4295        } else {
4296            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4297            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4298            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4299        }
4300        pthread_mutex_unlock(&gCamLock);
4301
4302        rc = mCameraHandle->ops->sync_related_sensors(
4303                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4304        if (rc < 0) {
4305            LOGE("Dualcam: Unlink failed, but still proceed to close");
4306        }
4307    }
4308
4309    if (rc < 0) {
4310        LOGE("stopAllChannels failed");
4311        return rc;
4312    }
4313    if (mChannelHandle) {
4314        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4315                mChannelHandle);
4316    }
4317
4318    // Reset bundle info
4319    rc = setBundleInfo();
4320    if (rc < 0) {
4321        LOGE("setBundleInfo failed %d", rc);
4322        return rc;
4323    }
4324
4325    // Mutex Lock
4326    pthread_mutex_lock(&mMutex);
4327
4328    // Unblock process_capture_request
4329    mPendingLiveRequest = 0;
4330    pthread_cond_signal(&mRequestCond);
4331
4332    rc = notifyErrorForPendingRequests();
4333    if (rc < 0) {
4334        LOGE("notifyErrorForPendingRequests failed");
4335        pthread_mutex_unlock(&mMutex);
4336        return rc;
4337    }
4338
4339    mFlush = false;
4340
4341    // Start the Streams/Channels
4342    if (restartChannels) {
4343        rc = startAllChannels();
4344        if (rc < 0) {
4345            LOGE("startAllChannels failed");
4346            pthread_mutex_unlock(&mMutex);
4347            return rc;
4348        }
4349    }
4350
4351    if (mChannelHandle) {
4352        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4353                    mChannelHandle);
4354        if (rc < 0) {
4355            LOGE("start_channel failed");
4356            pthread_mutex_unlock(&mMutex);
4357            return rc;
4358        }
4359    }
4360
4361    pthread_mutex_unlock(&mMutex);
4362
4363    return 0;
4364}
4365
4366/*===========================================================================
4367 * FUNCTION   : flushPerf
4368 *
4369 * DESCRIPTION: This is the performance optimization version of flush that does
4370 *              not use stream off, rather flushes the system
4371 *
4372 * PARAMETERS :
4373 *
4374 *
4375 * RETURN     : 0 : success
4376 *              -EINVAL: input is malformed (device is not valid)
4377 *              -ENODEV: if the device has encountered a serious error
4378 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Record how many buffers the HAL still owns at the moment of flush;
    // the flush is considered complete once all of them have been returned.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding: flush is trivially complete.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        // Clock read failed: fall back to an unbounded wait rather than
        // computing a bogus absolute deadline.
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        // pthread_cond_timedwait takes an absolute CLOCK_REALTIME deadline.
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // NOTE(review): this assumes mBuffersCond is signalled and
    // numPendingBufsAtFlush is decremented under mMutex by the buffer-return
    // path — confirm against the callback code.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            // Bounded wait; rc becomes ETIMEDOUT if the deadline passes with
            // buffers still pending, and we bail out below.
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out with buffers still outstanding; report a
        // serious device error to the caller.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4479
4480/*===========================================================================
4481 * FUNCTION   : handleCameraDeviceError
4482 *
4483 * DESCRIPTION: This function calls internal flush and notifies the error to
4484 *              framework and updates the state variable.
4485 *
4486 * PARAMETERS : None
4487 *
4488 * RETURN     : NO_ERROR on Success
4489 *              Error code on failure
4490 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    pthread_mutex_lock(&mMutex);
    if (mState != ERROR) {
        //if mState != ERROR, nothing to be done
        pthread_mutex_unlock(&mMutex);
        return NO_ERROR;
    }
    pthread_mutex_unlock(&mMutex);

    // Internal flush without restarting channels. mMutex must be released
    // before this call: flush() acquires mMutex itself.
    rc = flush(false /* restart channels */);
    if (NO_ERROR != rc) {
        // Best-effort: even if the flush fails we still transition to DEINIT
        // and notify the framework below.
        LOGE("internal flush to handle mState = ERROR failed");
    }

    pthread_mutex_lock(&mMutex);
    mState = DEINIT;
    pthread_mutex_unlock(&mMutex);

    // Report an unrecoverable device-level error to the framework.
    // frame_number is 0 and error_stream is NULL since CAMERA3_MSG_ERROR_DEVICE
    // is not tied to a particular request or stream.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    mCallbackOps->notify(mCallbackOps, &notify_msg);

    return rc;
}
4522
4523/*===========================================================================
4524 * FUNCTION   : captureResultCb
4525 *
4526 * DESCRIPTION: Callback handler for all capture result
4527 *              (streams, as well as metadata)
4528 *
4529 * PARAMETERS :
4530 *   @metadata : metadata information
4531 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4532 *               NULL if metadata.
4533 *
4534 * RETURN     : NONE
4535 *==========================================================================*/
4536void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4537                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4538{
4539    if (metadata_buf) {
4540        if (mBatchSize) {
4541            handleBatchMetadata(metadata_buf,
4542                    true /* free_and_bufdone_meta_buf */);
4543        } else { /* mBatchSize = 0 */
4544            hdrPlusPerfLock(metadata_buf);
4545            pthread_mutex_lock(&mMutex);
4546            handleMetadataWithLock(metadata_buf,
4547                    true /* free_and_bufdone_meta_buf */);
4548            pthread_mutex_unlock(&mMutex);
4549        }
4550    } else if (isInputBuffer) {
4551        pthread_mutex_lock(&mMutex);
4552        handleInputBufferWithLock(frame_number);
4553        pthread_mutex_unlock(&mMutex);
4554    } else {
4555        pthread_mutex_lock(&mMutex);
4556        handleBufferWithLock(buffer, frame_number);
4557        pthread_mutex_unlock(&mMutex);
4558    }
4559    return;
4560}
4561
4562/*===========================================================================
4563 * FUNCTION   : getReprocessibleOutputStreamId
4564 *
4565 * DESCRIPTION: Get source output stream id for the input reprocess stream
4566 *              based on size and format, which would be the largest
4567 *              output stream if an input stream exists.
4568 *
4569 * PARAMETERS :
4570 *   @id      : return the stream id if found
4571 *
4572 * RETURN     : int32_t type of status
4573 *              NO_ERROR  -- success
4574 *              none-zero failure code
4575 *==========================================================================*/
4576int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4577{
4578    /* check if any output or bidirectional stream with the same size and format
4579       and return that stream */
4580    if ((mInputStreamInfo.dim.width > 0) &&
4581            (mInputStreamInfo.dim.height > 0)) {
4582        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4583                it != mStreamInfo.end(); it++) {
4584
4585            camera3_stream_t *stream = (*it)->stream;
4586            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4587                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4588                    (stream->format == mInputStreamInfo.format)) {
4589                // Usage flag for an input stream and the source output stream
4590                // may be different.
4591                LOGD("Found reprocessible output stream! %p", *it);
4592                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4593                         stream->usage, mInputStreamInfo.usage);
4594
4595                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4596                if (channel != NULL && channel->mStreams[0]) {
4597                    id = channel->mStreams[0]->getMyServerID();
4598                    return NO_ERROR;
4599                }
4600            }
4601        }
4602    } else {
4603        LOGD("No input stream, so no reprocessible output stream");
4604    }
4605    return NAME_NOT_FOUND;
4606}
4607
4608/*===========================================================================
4609 * FUNCTION   : lookupFwkName
4610 *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
4613 *
4614 * PARAMETERS  :
4615 *   @arr      : map between the two enums
4616 *   @len      : len of the map
4617 *   @hal_name : name of the hal_parm to map
4618 *
4619 * RETURN     : int type of status
4620 *              fwk_name  -- success
4621 *              none-zero failure code
4622 *==========================================================================*/
4623template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4624        size_t len, halType hal_name)
4625{
4626
4627    for (size_t i = 0; i < len; i++) {
4628        if (arr[i].hal_name == hal_name) {
4629            return arr[i].fwk_name;
4630        }
4631    }
4632
4633    /* Not able to find matching framework type is not necessarily
4634     * an error case. This happens when mm-camera supports more attributes
4635     * than the frameworks do */
4636    LOGH("Cannot find matching framework type");
4637    return NAME_NOT_FOUND;
4638}
4639
4640/*===========================================================================
4641 * FUNCTION   : lookupHalName
4642 *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
4645 *
4646 * PARAMETERS  :
4647 *   @arr      : map between the two enums
4648 *   @len      : len of the map
 *   @fwk_name : name of the framework parameter to map
4650 *
4651 * RETURN     : int32_t type of status
4652 *              hal_name  -- success
4653 *              none-zero failure code
4654 *==========================================================================*/
4655template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4656        size_t len, fwkType fwk_name)
4657{
4658    for (size_t i = 0; i < len; i++) {
4659        if (arr[i].fwk_name == fwk_name) {
4660            return arr[i].hal_name;
4661        }
4662    }
4663
4664    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4665    return NAME_NOT_FOUND;
4666}
4667
4668/*===========================================================================
4669 * FUNCTION   : lookupProp
4670 *
4671 * DESCRIPTION: lookup a value by its name
4672 *
4673 * PARAMETERS :
4674 *   @arr     : map between the two enums
4675 *   @len     : size of the map
4676 *   @name    : name to be looked up
4677 *
4678 * RETURN     : Value if found
4679 *              CAM_CDS_MODE_MAX if not found
4680 *==========================================================================*/
4681template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4682        size_t len, const char *name)
4683{
4684    if (name) {
4685        for (size_t i = 0; i < len; i++) {
4686            if (!strcmp(arr[i].desc, name)) {
4687                return arr[i].val;
4688            }
4689        }
4690    }
4691    return CAM_CDS_MODE_MAX;
4692}
4693
4694/*===========================================================================
4695 *
4696 * DESCRIPTION:
4697 *
4698 * PARAMETERS :
4699 *   @metadata : metadata information from callback
4700 *   @timestamp: metadata buffer timestamp
4701 *   @request_id: request id
4702 *   @jpegMetadata: additional jpeg metadata
4703 *   @hybrid_ae_enable: whether hybrid ae is enabled
4704 *   // DevCamDebug metadata
4705 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
4706 *   // DevCamDebug metadata end
4707 *   @pprocDone: whether internal offline postprocsesing is done
4708 *
4709 * RETURN     : camera_metadata_t*
4710 *              metadata in a format specified by fwk
4711 *==========================================================================*/
4712camera_metadata_t*
4713QCamera3HardwareInterface::translateFromHalMetadata(
4714                                 metadata_buffer_t *metadata,
4715                                 nsecs_t timestamp,
4716                                 int32_t request_id,
4717                                 const CameraMetadata& jpegMetadata,
4718                                 uint8_t pipeline_depth,
4719                                 uint8_t capture_intent,
4720                                 uint8_t hybrid_ae_enable,
4721                                 /* DevCamDebug metadata translateFromHalMetadata argument */
4722                                 uint8_t DevCamDebug_meta_enable,
4723                                 /* DevCamDebug metadata end */
4724                                 bool pprocDone,
4725                                 uint8_t fwk_cacMode)
4726{
4727    CameraMetadata camMetadata;
4728    camera_metadata_t *resultMetadata;
4729
4730    if (jpegMetadata.entryCount())
4731        camMetadata.append(jpegMetadata);
4732
4733    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4734    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4735    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4736    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4737    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4738    /* DevCamDebug metadata translateFromHalMetadata */
4739    camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
4740    IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
4741            CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
4742        int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
4743        camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
4744    }
4745    /* DevCamDebug metadata end */
4746
4747
4748    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4749        int64_t fwk_frame_number = *frame_number;
4750        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4751    }
4752
4753    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4754        int32_t fps_range[2];
4755        fps_range[0] = (int32_t)float_range->min_fps;
4756        fps_range[1] = (int32_t)float_range->max_fps;
4757        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4758                                      fps_range, 2);
4759        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4760             fps_range[0], fps_range[1]);
4761    }
4762
4763    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4764        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4765    }
4766
4767    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4768        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4769                METADATA_MAP_SIZE(SCENE_MODES_MAP),
4770                *sceneMode);
4771        if (NAME_NOT_FOUND != val) {
4772            uint8_t fwkSceneMode = (uint8_t)val;
4773            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4774            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4775                     fwkSceneMode);
4776        }
4777    }
4778
4779    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4780        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4781        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4782    }
4783
4784    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4785        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4786        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4787    }
4788
4789    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4790        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4791        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4792    }
4793
4794    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4795            CAM_INTF_META_EDGE_MODE, metadata) {
4796        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4797    }
4798
4799    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4800        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4801        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4802    }
4803
4804    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4805        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4806    }
4807
4808    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4809        if (0 <= *flashState) {
4810            uint8_t fwk_flashState = (uint8_t) *flashState;
4811            if (!gCamCapability[mCameraId]->flash_available) {
4812                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4813            }
4814            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4815        }
4816    }
4817
4818    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4819        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4820        if (NAME_NOT_FOUND != val) {
4821            uint8_t fwk_flashMode = (uint8_t)val;
4822            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4823        }
4824    }
4825
4826    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4827        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4828        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4829    }
4830
4831    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4832        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4833    }
4834
4835    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4836        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4837    }
4838
4839    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4840        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4841    }
4842
4843    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4844        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4845        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4846    }
4847
4848    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4849        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4850        LOGD("fwk_videoStab = %d", fwk_videoStab);
4851        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4852    } else {
4853        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
4854        // and so hardcoding the Video Stab result to OFF mode.
4855        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4856        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
4857        LOGD("%s: EIS result default to OFF mode", __func__);
4858    }
4859
4860    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4861        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4862        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4863    }
4864
4865    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4866        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4867    }
4868
4869    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4870        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4871
4872        LOGD("dynamicblackLevel = %f %f %f %f",
4873          blackLevelSourcePattern->cam_black_level[0],
4874          blackLevelSourcePattern->cam_black_level[1],
4875          blackLevelSourcePattern->cam_black_level[2],
4876          blackLevelSourcePattern->cam_black_level[3]);
4877    }
4878
4879    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4880        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4881        float fwk_blackLevelInd[4];
4882
4883        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4884        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4885        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4886        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4887
4888        LOGD("applied dynamicblackLevel = %f %f %f %f",
4889          blackLevelAppliedPattern->cam_black_level[0],
4890          blackLevelAppliedPattern->cam_black_level[1],
4891          blackLevelAppliedPattern->cam_black_level[2],
4892          blackLevelAppliedPattern->cam_black_level[3]);
4893        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4894
4895        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
4896        // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4897        // depth space.
4898        fwk_blackLevelInd[0] /= 64.0;
4899        fwk_blackLevelInd[1] /= 64.0;
4900        fwk_blackLevelInd[2] /= 64.0;
4901        fwk_blackLevelInd[3] /= 64.0;
4902        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4903    }
4904
4905    // Fixed whitelevel is used by ISP/Sensor
4906    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4907            &gCamCapability[mCameraId]->white_level, 1);
4908
4909    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4910            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4911        int32_t scalerCropRegion[4];
4912        scalerCropRegion[0] = hScalerCropRegion->left;
4913        scalerCropRegion[1] = hScalerCropRegion->top;
4914        scalerCropRegion[2] = hScalerCropRegion->width;
4915        scalerCropRegion[3] = hScalerCropRegion->height;
4916
4917        // Adjust crop region from sensor output coordinate system to active
4918        // array coordinate system.
4919        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4920                scalerCropRegion[2], scalerCropRegion[3]);
4921
4922        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4923    }
4924
4925    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4926        LOGD("sensorExpTime = %lld", *sensorExpTime);
4927        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4928    }
4929
4930    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4931            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4932        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
4933        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4934    }
4935
4936    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4937            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4938        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
4939        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4940                sensorRollingShutterSkew, 1);
4941    }
4942
4943    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4944        LOGD("sensorSensitivity = %d", *sensorSensitivity);
4945        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4946
4947        //calculate the noise profile based on sensitivity
4948        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4949        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4950        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4951        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4952            noise_profile[i]   = noise_profile_S;
4953            noise_profile[i+1] = noise_profile_O;
4954        }
4955        LOGD("noise model entry (S, O) is (%f, %f)",
4956                noise_profile_S, noise_profile_O);
4957        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4958                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4959    }
4960
4961    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
4962        int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
4963        camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
4964    }
4965
4966    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4967        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4968        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4969    }
4970
4971    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4972        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4973                *faceDetectMode);
4974        if (NAME_NOT_FOUND != val) {
4975            uint8_t fwk_faceDetectMode = (uint8_t)val;
4976            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4977
4978            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4979                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4980                        CAM_INTF_META_FACE_DETECTION, metadata) {
4981                    uint8_t numFaces = MIN(
4982                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4983                    int32_t faceIds[MAX_ROI];
4984                    uint8_t faceScores[MAX_ROI];
4985                    int32_t faceRectangles[MAX_ROI * 4];
4986                    int32_t faceLandmarks[MAX_ROI * 6];
4987                    size_t j = 0, k = 0;
4988
4989                    for (size_t i = 0; i < numFaces; i++) {
4990                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4991                        // Adjust crop region from sensor output coordinate system to active
4992                        // array coordinate system.
4993                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4994                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4995                                rect.width, rect.height);
4996
4997                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4998                                faceRectangles+j, -1);
4999
5000                        j+= 4;
5001                    }
5002                    if (numFaces <= 0) {
5003                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5004                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5005                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5006                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5007                    }
5008
5009                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5010                            numFaces);
5011                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5012                            faceRectangles, numFaces * 4U);
5013                    if (fwk_faceDetectMode ==
5014                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5015                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5016                                CAM_INTF_META_FACE_LANDMARK, metadata) {
5017
5018                            for (size_t i = 0; i < numFaces; i++) {
5019                                // Map the co-ordinate sensor output coordinate system to active
5020                                // array coordinate system.
5021                                mCropRegionMapper.toActiveArray(
5022                                        landmarks->face_landmarks[i].left_eye_center.x,
5023                                        landmarks->face_landmarks[i].left_eye_center.y);
5024                                mCropRegionMapper.toActiveArray(
5025                                        landmarks->face_landmarks[i].right_eye_center.x,
5026                                        landmarks->face_landmarks[i].right_eye_center.y);
5027                                mCropRegionMapper.toActiveArray(
5028                                        landmarks->face_landmarks[i].mouth_center.x,
5029                                        landmarks->face_landmarks[i].mouth_center.y);
5030
5031                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5032                                k+= 6;
5033                            }
5034                        }
5035
5036                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5037                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5038                                faceLandmarks, numFaces * 6U);
5039                   }
5040                }
5041            }
5042        }
5043    }
5044
5045    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5046        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5047        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5048    }
5049
5050    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5051            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5052        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5053        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5054    }
5055
5056    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5057            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5058        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5059                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5060    }
5061
5062    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5063            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5064        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5065                CAM_MAX_SHADING_MAP_HEIGHT);
5066        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5067                CAM_MAX_SHADING_MAP_WIDTH);
5068        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5069                lensShadingMap->lens_shading, 4U * map_width * map_height);
5070    }
5071
5072    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5073        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5074        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5075    }
5076
5077    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5078        //Populate CAM_INTF_META_TONEMAP_CURVES
5079        /* ch0 = G, ch 1 = B, ch 2 = R*/
5080        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5081            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5082                     tonemap->tonemap_points_cnt,
5083                    CAM_MAX_TONEMAP_CURVE_SIZE);
5084            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5085        }
5086
5087        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5088                        &tonemap->curves[0].tonemap_points[0][0],
5089                        tonemap->tonemap_points_cnt * 2);
5090
5091        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5092                        &tonemap->curves[1].tonemap_points[0][0],
5093                        tonemap->tonemap_points_cnt * 2);
5094
5095        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5096                        &tonemap->curves[2].tonemap_points[0][0],
5097                        tonemap->tonemap_points_cnt * 2);
5098    }
5099
5100    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5101            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5102        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5103                CC_GAINS_COUNT);
5104    }
5105
5106    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5107            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5108        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5109                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5110                CC_MATRIX_COLS * CC_MATRIX_ROWS);
5111    }
5112
5113    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5114            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5115        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5116            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5117                     toneCurve->tonemap_points_cnt,
5118                    CAM_MAX_TONEMAP_CURVE_SIZE);
5119            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5120        }
5121        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5122                (float*)toneCurve->curve.tonemap_points,
5123                toneCurve->tonemap_points_cnt * 2);
5124    }
5125
5126    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5127            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5128        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5129                predColorCorrectionGains->gains, 4);
5130    }
5131
5132    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5133            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5134        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5135                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5136                CC_MATRIX_ROWS * CC_MATRIX_COLS);
5137    }
5138
5139    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5140        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5141    }
5142
5143    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5144        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5145        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5146    }
5147
5148    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5149        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5150        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5151    }
5152
5153    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5154        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5155                *effectMode);
5156        if (NAME_NOT_FOUND != val) {
5157            uint8_t fwk_effectMode = (uint8_t)val;
5158            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5159        }
5160    }
5161
5162    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5163            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5164        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5165                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5166        if (NAME_NOT_FOUND != fwk_testPatternMode) {
5167            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5168        }
5169        int32_t fwk_testPatternData[4];
5170        fwk_testPatternData[0] = testPatternData->r;
5171        fwk_testPatternData[3] = testPatternData->b;
5172        switch (gCamCapability[mCameraId]->color_arrangement) {
5173        case CAM_FILTER_ARRANGEMENT_RGGB:
5174        case CAM_FILTER_ARRANGEMENT_GRBG:
5175            fwk_testPatternData[1] = testPatternData->gr;
5176            fwk_testPatternData[2] = testPatternData->gb;
5177            break;
5178        case CAM_FILTER_ARRANGEMENT_GBRG:
5179        case CAM_FILTER_ARRANGEMENT_BGGR:
5180            fwk_testPatternData[2] = testPatternData->gr;
5181            fwk_testPatternData[1] = testPatternData->gb;
5182            break;
5183        default:
5184            LOGE("color arrangement %d is not supported",
5185                gCamCapability[mCameraId]->color_arrangement);
5186            break;
5187        }
5188        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5189    }
5190
5191    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5192        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5193    }
5194
5195    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5196        String8 str((const char *)gps_methods);
5197        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5198    }
5199
5200    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5201        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5202    }
5203
5204    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5205        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5206    }
5207
5208    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5209        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5210        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5211    }
5212
5213    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5214        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5215        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5216    }
5217
5218    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5219        int32_t fwk_thumb_size[2];
5220        fwk_thumb_size[0] = thumb_size->width;
5221        fwk_thumb_size[1] = thumb_size->height;
5222        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5223    }
5224
5225    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5226        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5227                privateData,
5228                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5229    }
5230
5231    if (metadata->is_tuning_params_valid) {
5232        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5233        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5234        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5235
5236
5237        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5238                sizeof(uint32_t));
5239        data += sizeof(uint32_t);
5240
5241        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5242                sizeof(uint32_t));
5243        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5244        data += sizeof(uint32_t);
5245
5246        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5247                sizeof(uint32_t));
5248        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5249        data += sizeof(uint32_t);
5250
5251        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5252                sizeof(uint32_t));
5253        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5254        data += sizeof(uint32_t);
5255
5256        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5257                sizeof(uint32_t));
5258        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5259        data += sizeof(uint32_t);
5260
5261        metadata->tuning_params.tuning_mod3_data_size = 0;
5262        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5263                sizeof(uint32_t));
5264        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5265        data += sizeof(uint32_t);
5266
5267        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5268                TUNING_SENSOR_DATA_MAX);
5269        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5270                count);
5271        data += count;
5272
5273        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5274                TUNING_VFE_DATA_MAX);
5275        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5276                count);
5277        data += count;
5278
5279        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5280                TUNING_CPP_DATA_MAX);
5281        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5282                count);
5283        data += count;
5284
5285        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5286                TUNING_CAC_DATA_MAX);
5287        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5288                count);
5289        data += count;
5290
5291        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5292                (int32_t *)(void *)tuning_meta_data_blob,
5293                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5294    }
5295
5296    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5297            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5298        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5299                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5300                NEUTRAL_COL_POINTS);
5301    }
5302
5303    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5304        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5305        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5306    }
5307
5308    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5309        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5310        // Adjust crop region from sensor output coordinate system to active
5311        // array coordinate system.
5312        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5313                hAeRegions->rect.width, hAeRegions->rect.height);
5314
5315        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5316        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5317                REGIONS_TUPLE_COUNT);
5318        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5319                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5320                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5321                hAeRegions->rect.height);
5322    }
5323
5324    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5325        uint8_t fwk_afState = (uint8_t) *afState;
5326        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5327        LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5328    }
5329
5330    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5331        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5332    }
5333
5334    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5335        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5336    }
5337
5338    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5339        uint8_t fwk_lensState = *lensState;
5340        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5341    }
5342
5343    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5344        /*af regions*/
5345        int32_t afRegions[REGIONS_TUPLE_COUNT];
5346        // Adjust crop region from sensor output coordinate system to active
5347        // array coordinate system.
5348        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5349                hAfRegions->rect.width, hAfRegions->rect.height);
5350
5351        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5352        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5353                REGIONS_TUPLE_COUNT);
5354        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5355                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5356                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5357                hAfRegions->rect.height);
5358    }
5359
5360    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5361        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5362                *hal_ab_mode);
5363        if (NAME_NOT_FOUND != val) {
5364            uint8_t fwk_ab_mode = (uint8_t)val;
5365            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5366        }
5367    }
5368
5369    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5370        int val = lookupFwkName(SCENE_MODES_MAP,
5371                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5372        if (NAME_NOT_FOUND != val) {
5373            uint8_t fwkBestshotMode = (uint8_t)val;
5374            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5375            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5376        } else {
5377            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5378        }
5379    }
5380
5381    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5382         uint8_t fwk_mode = (uint8_t) *mode;
5383         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5384    }
5385
5386    /* Constant metadata values to be update*/
5387    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5388    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5389
5390    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5391    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5392
5393    int32_t hotPixelMap[2];
5394    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5395
5396    // CDS
5397    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5398        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5399    }
5400
5401    // TNR
5402    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5403        uint8_t tnr_enable       = tnr->denoise_enable;
5404        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5405
5406        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5407        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5408    }
5409
5410    // Reprocess crop data
5411    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5412        uint8_t cnt = crop_data->num_of_streams;
5413        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5414            // mm-qcamera-daemon only posts crop_data for streams
5415            // not linked to pproc. So no valid crop metadata is not
5416            // necessarily an error case.
5417            LOGD("No valid crop metadata entries");
5418        } else {
5419            uint32_t reproc_stream_id;
5420            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5421                LOGD("No reprocessible stream found, ignore crop data");
5422            } else {
5423                int rc = NO_ERROR;
5424                Vector<int32_t> roi_map;
5425                int32_t *crop = new int32_t[cnt*4];
5426                if (NULL == crop) {
5427                   rc = NO_MEMORY;
5428                }
5429                if (NO_ERROR == rc) {
5430                    int32_t streams_found = 0;
5431                    for (size_t i = 0; i < cnt; i++) {
5432                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5433                            if (pprocDone) {
5434                                // HAL already does internal reprocessing,
5435                                // either via reprocessing before JPEG encoding,
5436                                // or offline postprocessing for pproc bypass case.
5437                                crop[0] = 0;
5438                                crop[1] = 0;
5439                                crop[2] = mInputStreamInfo.dim.width;
5440                                crop[3] = mInputStreamInfo.dim.height;
5441                            } else {
5442                                crop[0] = crop_data->crop_info[i].crop.left;
5443                                crop[1] = crop_data->crop_info[i].crop.top;
5444                                crop[2] = crop_data->crop_info[i].crop.width;
5445                                crop[3] = crop_data->crop_info[i].crop.height;
5446                            }
5447                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5448                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5449                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5450                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5451                            streams_found++;
5452                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5453                                    crop[0], crop[1], crop[2], crop[3]);
5454                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5455                                    crop_data->crop_info[i].roi_map.left,
5456                                    crop_data->crop_info[i].roi_map.top,
5457                                    crop_data->crop_info[i].roi_map.width,
5458                                    crop_data->crop_info[i].roi_map.height);
5459                            break;
5460
5461                       }
5462                    }
5463                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5464                            &streams_found, 1);
5465                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5466                            crop, (size_t)(streams_found * 4));
5467                    if (roi_map.array()) {
5468                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5469                                roi_map.array(), roi_map.size());
5470                    }
5471               }
5472               if (crop) {
5473                   delete [] crop;
5474               }
5475            }
5476        }
5477    }
5478
5479    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5480        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5481        // so hardcoding the CAC result to OFF mode.
5482        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5483        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5484    } else {
5485        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5486            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5487                    *cacMode);
5488            if (NAME_NOT_FOUND != val) {
5489                uint8_t resultCacMode = (uint8_t)val;
5490                // check whether CAC result from CB is equal to Framework set CAC mode
5491                // If not equal then set the CAC mode came in corresponding request
5492                if (fwk_cacMode != resultCacMode) {
5493                    resultCacMode = fwk_cacMode;
5494                }
5495                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5496                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5497            } else {
5498                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5499            }
5500        }
5501    }
5502
5503    // Post blob of cam_cds_data through vendor tag.
5504    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5505        uint8_t cnt = cdsInfo->num_of_streams;
5506        cam_cds_data_t cdsDataOverride;
5507        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5508        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5509        cdsDataOverride.num_of_streams = 1;
5510        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5511            uint32_t reproc_stream_id;
5512            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5513                LOGD("No reprocessible stream found, ignore cds data");
5514            } else {
5515                for (size_t i = 0; i < cnt; i++) {
5516                    if (cdsInfo->cds_info[i].stream_id ==
5517                            reproc_stream_id) {
5518                        cdsDataOverride.cds_info[0].cds_enable =
5519                                cdsInfo->cds_info[i].cds_enable;
5520                        break;
5521                    }
5522                }
5523            }
5524        } else {
5525            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5526        }
5527        camMetadata.update(QCAMERA3_CDS_INFO,
5528                (uint8_t *)&cdsDataOverride,
5529                sizeof(cam_cds_data_t));
5530    }
5531
5532    // Ldaf calibration data
5533    if (!mLdafCalibExist) {
5534        IF_META_AVAILABLE(uint32_t, ldafCalib,
5535                CAM_INTF_META_LDAF_EXIF, metadata) {
5536            mLdafCalibExist = true;
5537            mLdafCalib[0] = ldafCalib[0];
5538            mLdafCalib[1] = ldafCalib[1];
5539            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5540                    ldafCalib[0], ldafCalib[1]);
5541        }
5542    }
5543
5544    resultMetadata = camMetadata.release();
5545    return resultMetadata;
5546}
5547
5548/*===========================================================================
5549 * FUNCTION   : saveExifParams
5550 *
 * DESCRIPTION: Caches the 3A/stats EXIF debug-data blobs delivered in a
 *              metadata callback into mExifParams, for later use when
 *              composing JPEG EXIF data.
5552 *
5553 * PARAMETERS :
5554 *   @metadata : metadata information from callback
5555 *
5556 * RETURN     : none
5557 *
5558 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Caches each 3A/stats EXIF debug-data blob present in this metadata
    // callback buffer into mExifParams, for later use when composing JPEG
    // EXIF data. Each blob is copied only if:
    //   (a) the corresponding entry is present in 'metadata'
    //       (IF_META_AVAILABLE), and
    //   (b) mExifParams.debug_params storage has been allocated.
    // The matching *_valid flag is raised alongside each copy.

    // AE (auto-exposure) debug data
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto-white-balance) debug data
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto-focus) debug data
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto-scene-detection) debug data
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // Stats buffer debug data
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // BE (bayer-exposure) stats debug data
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer-histogram debug data
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug data
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
5618
5619/*===========================================================================
5620 * FUNCTION   : get3AExifParams
5621 *
 * DESCRIPTION: Returns a copy of the cached 3A EXIF parameters (mExifParams).
5623 *
5624 * PARAMETERS : none
5625 *
5626 *
5627 * RETURN     : mm_jpeg_exif_params_t
5628 *
5629 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns a copy of the cached EXIF parameters. Note this is a
    // shallow copy: any debug_params pointer inside is shared with
    // mExifParams, not duplicated.
    return mExifParams;
}
5634
5635/*===========================================================================
5636 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5637 *
 * DESCRIPTION: Translates urgent (3A) HAL metadata — AE/AF/AWB modes, states
 *              and triggers — into framework partial-result metadata.
5639 *
5640 * PARAMETERS :
5641 *   @metadata : metadata information from callback
5642 *
5643 * RETURN     : camera_metadata_t*
5644 *              metadata in a format specified by fwk
5645 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Builds the "urgent" (partial) framework result from HAL 3A metadata:
    // AE/AF/AWB modes, states and triggers. Each tag is populated only when
    // the corresponding entry is present in 'metadata'. Ownership of the
    // returned camera_metadata_t is transferred to the caller via release().
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (uint32 in HAL -> uint8 framework enum)
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are reported together
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: HAL focus mode mapped to framework enum; skipped if the
    // HAL value has no framework equivalent
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger and its id
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: HAL white-balance mapped to framework enum
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL entries.
    // Priority order (first match wins):
    //   1. red-eye reduction enabled     -> ON_AUTO_FLASH_REDEYE
    //   2. flash mode AUTO/ON            -> mapped via AE_FLASH_MODE_MAP
    //   3. plain AE on/off               -> ON / OFF
    // Sentinel initial values (CAM_*_MAX / -1) mark "not reported".
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three entries gave enough information; tag is omitted.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Caller takes ownership of the released buffer
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5752
5753/*===========================================================================
5754 * FUNCTION   : dumpMetadataToFile
5755 *
5756 * DESCRIPTION: Dumps tuning metadata to file system
5757 *
5758 * PARAMETERS :
5759 *   @meta           : tuning metadata
5760 *   @dumpFrameCount : current dump frame count
5761 *   @enabled        : Enable mask
5762 *
5763 *==========================================================================*/
5764void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5765                                                   uint32_t &dumpFrameCount,
5766                                                   bool enabled,
5767                                                   const char *type,
5768                                                   uint32_t frameNumber)
5769{
5770    //Some sanity checks
5771    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5772        LOGE("Tuning sensor data size bigger than expected %d: %d",
5773              meta.tuning_sensor_data_size,
5774              TUNING_SENSOR_DATA_MAX);
5775        return;
5776    }
5777
5778    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5779        LOGE("Tuning VFE data size bigger than expected %d: %d",
5780              meta.tuning_vfe_data_size,
5781              TUNING_VFE_DATA_MAX);
5782        return;
5783    }
5784
5785    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5786        LOGE("Tuning CPP data size bigger than expected %d: %d",
5787              meta.tuning_cpp_data_size,
5788              TUNING_CPP_DATA_MAX);
5789        return;
5790    }
5791
5792    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5793        LOGE("Tuning CAC data size bigger than expected %d: %d",
5794              meta.tuning_cac_data_size,
5795              TUNING_CAC_DATA_MAX);
5796        return;
5797    }
5798    //
5799
5800    if(enabled){
5801        char timeBuf[FILENAME_MAX];
5802        char buf[FILENAME_MAX];
5803        memset(buf, 0, sizeof(buf));
5804        memset(timeBuf, 0, sizeof(timeBuf));
5805        time_t current_time;
5806        struct tm * timeinfo;
5807        time (&current_time);
5808        timeinfo = localtime (&current_time);
5809        if (timeinfo != NULL) {
5810            strftime (timeBuf, sizeof(timeBuf),
5811                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5812        }
5813        String8 filePath(timeBuf);
5814        snprintf(buf,
5815                sizeof(buf),
5816                "%dm_%s_%d.bin",
5817                dumpFrameCount,
5818                type,
5819                frameNumber);
5820        filePath.append(buf);
5821        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5822        if (file_fd >= 0) {
5823            ssize_t written_len = 0;
5824            meta.tuning_data_version = TUNING_DATA_VERSION;
5825            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5826            written_len += write(file_fd, data, sizeof(uint32_t));
5827            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5828            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5829            written_len += write(file_fd, data, sizeof(uint32_t));
5830            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5831            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5832            written_len += write(file_fd, data, sizeof(uint32_t));
5833            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5834            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5835            written_len += write(file_fd, data, sizeof(uint32_t));
5836            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5837            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5838            written_len += write(file_fd, data, sizeof(uint32_t));
5839            meta.tuning_mod3_data_size = 0;
5840            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5841            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5842            written_len += write(file_fd, data, sizeof(uint32_t));
5843            size_t total_size = meta.tuning_sensor_data_size;
5844            data = (void *)((uint8_t *)&meta.data);
5845            written_len += write(file_fd, data, total_size);
5846            total_size = meta.tuning_vfe_data_size;
5847            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5848            written_len += write(file_fd, data, total_size);
5849            total_size = meta.tuning_cpp_data_size;
5850            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5851            written_len += write(file_fd, data, total_size);
5852            total_size = meta.tuning_cac_data_size;
5853            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5854            written_len += write(file_fd, data, total_size);
5855            close(file_fd);
5856        }else {
5857            LOGE("fail to open file for metadata dumping");
5858        }
5859    }
5860}
5861
5862/*===========================================================================
5863 * FUNCTION   : cleanAndSortStreamInfo
5864 *
5865 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5866 *              and sort them such that raw stream is at the end of the list
5867 *              This is a workaround for camera daemon constraint.
5868 *
5869 * PARAMETERS : None
5870 *
5871 *==========================================================================*/
5872void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5873{
5874    List<stream_info_t *> newStreamInfo;
5875
5876    /*clean up invalid streams*/
5877    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5878            it != mStreamInfo.end();) {
5879        if(((*it)->status) == INVALID){
5880            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5881            delete channel;
5882            free(*it);
5883            it = mStreamInfo.erase(it);
5884        } else {
5885            it++;
5886        }
5887    }
5888
5889    // Move preview/video/callback/snapshot streams into newList
5890    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5891            it != mStreamInfo.end();) {
5892        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5893                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5894                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5895            newStreamInfo.push_back(*it);
5896            it = mStreamInfo.erase(it);
5897        } else
5898            it++;
5899    }
5900    // Move raw streams into newList
5901    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5902            it != mStreamInfo.end();) {
5903        newStreamInfo.push_back(*it);
5904        it = mStreamInfo.erase(it);
5905    }
5906
5907    mStreamInfo = newStreamInfo;
5908}
5909
5910/*===========================================================================
5911 * FUNCTION   : extractJpegMetadata
5912 *
5913 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5914 *              JPEG metadata is cached in HAL, and return as part of capture
5915 *              result when metadata is returned from camera daemon.
5916 *
5917 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5918 *              @request:      capture request
5919 *
5920 *==========================================================================*/
5921void QCamera3HardwareInterface::extractJpegMetadata(
5922        CameraMetadata& jpegMetadata,
5923        const camera3_capture_request_t *request)
5924{
5925    CameraMetadata frame_settings;
5926    frame_settings = request->settings;
5927
5928    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5929        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5930                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5931                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5932
5933    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5934        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5935                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5936                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5937
5938    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5939        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5940                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5941                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5942
5943    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5944        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5945                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5946                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5947
5948    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5949        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5950                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5951                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5952
5953    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5954        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5955                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5956                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5957
5958    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5959        int32_t thumbnail_size[2];
5960        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5961        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5962        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5963            int32_t orientation =
5964                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5965            if ((orientation == 90) || (orientation == 270)) {
5966               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5967               int32_t temp;
5968               temp = thumbnail_size[0];
5969               thumbnail_size[0] = thumbnail_size[1];
5970               thumbnail_size[1] = temp;
5971            }
5972         }
5973         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5974                thumbnail_size,
5975                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5976    }
5977
5978}
5979
5980/*===========================================================================
5981 * FUNCTION   : convertToRegions
5982 *
5983 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5984 *
5985 * PARAMETERS :
5986 *   @rect   : cam_rect_t struct to convert
5987 *   @region : int32_t destination array
5988 *   @weight : if we are converting from cam_area_t, weight is valid
5989 *             else weight = -1
5990 *
5991 *==========================================================================*/
5992void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5993        int32_t *region, int weight)
5994{
5995    region[0] = rect.left;
5996    region[1] = rect.top;
5997    region[2] = rect.left + rect.width;
5998    region[3] = rect.top + rect.height;
5999    if (weight > -1) {
6000        region[4] = weight;
6001    }
6002}
6003
6004/*===========================================================================
6005 * FUNCTION   : convertFromRegions
6006 *
6007 * DESCRIPTION: helper method to convert from array to cam_rect_t
6008 *
6009 * PARAMETERS :
6010 *   @rect   : cam_rect_t struct to convert
6011 *   @region : int32_t destination array
6012 *   @weight : if we are converting from cam_area_t, weight is valid
6013 *             else weight = -1
6014 *
6015 *==========================================================================*/
6016void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6017        const camera_metadata_t *settings, uint32_t tag)
6018{
6019    CameraMetadata frame_settings;
6020    frame_settings = settings;
6021    int32_t x_min = frame_settings.find(tag).data.i32[0];
6022    int32_t y_min = frame_settings.find(tag).data.i32[1];
6023    int32_t x_max = frame_settings.find(tag).data.i32[2];
6024    int32_t y_max = frame_settings.find(tag).data.i32[3];
6025    roi.weight = frame_settings.find(tag).data.i32[4];
6026    roi.rect.left = x_min;
6027    roi.rect.top = y_min;
6028    roi.rect.width = x_max - x_min;
6029    roi.rect.height = y_max - y_min;
6030}
6031
6032/*===========================================================================
6033 * FUNCTION   : resetIfNeededROI
6034 *
6035 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6036 *              crop region
6037 *
6038 * PARAMETERS :
6039 *   @roi       : cam_area_t struct to resize
6040 *   @scalerCropRegion : cam_crop_region_t region to compare against
6041 *
6042 *
6043 *==========================================================================*/
6044bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6045                                                 const cam_crop_region_t* scalerCropRegion)
6046{
6047    int32_t roi_x_max = roi->rect.width + roi->rect.left;
6048    int32_t roi_y_max = roi->rect.height + roi->rect.top;
6049    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6050    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6051
6052    /* According to spec weight = 0 is used to indicate roi needs to be disabled
6053     * without having this check the calculations below to validate if the roi
6054     * is inside scalar crop region will fail resulting in the roi not being
6055     * reset causing algorithm to continue to use stale roi window
6056     */
6057    if (roi->weight == 0) {
6058        return true;
6059    }
6060
6061    if ((roi_x_max < scalerCropRegion->left) ||
6062        // right edge of roi window is left of scalar crop's left edge
6063        (roi_y_max < scalerCropRegion->top)  ||
6064        // bottom edge of roi window is above scalar crop's top edge
6065        (roi->rect.left > crop_x_max) ||
6066        // left edge of roi window is beyond(right) of scalar crop's right edge
6067        (roi->rect.top > crop_y_max)){
6068        // top edge of roi windo is above scalar crop's top edge
6069        return false;
6070    }
6071    if (roi->rect.left < scalerCropRegion->left) {
6072        roi->rect.left = scalerCropRegion->left;
6073    }
6074    if (roi->rect.top < scalerCropRegion->top) {
6075        roi->rect.top = scalerCropRegion->top;
6076    }
6077    if (roi_x_max > crop_x_max) {
6078        roi_x_max = crop_x_max;
6079    }
6080    if (roi_y_max > crop_y_max) {
6081        roi_y_max = crop_y_max;
6082    }
6083    roi->rect.width = roi_x_max - roi->rect.left;
6084    roi->rect.height = roi_y_max - roi->rect.top;
6085    return true;
6086}
6087
6088/*===========================================================================
6089 * FUNCTION   : convertLandmarks
6090 *
6091 * DESCRIPTION: helper method to extract the landmarks from face detection info
6092 *
6093 * PARAMETERS :
6094 *   @landmark_data : input landmark data to be converted
6095 *   @landmarks : int32_t destination array
6096 *
6097 *
6098 *==========================================================================*/
6099void QCamera3HardwareInterface::convertLandmarks(
6100        cam_face_landmarks_info_t landmark_data,
6101        int32_t *landmarks)
6102{
6103    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6104    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6105    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6106    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6107    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6108    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6109}
6110
6111#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6112/*===========================================================================
6113 * FUNCTION   : initCapabilities
6114 *
6115 * DESCRIPTION: initialize camera capabilities in static data struct
6116 *
6117 * PARAMETERS :
6118 *   @cameraId  : camera Id
6119 *
6120 * RETURN     : int32_t type of status
6121 *              NO_ERROR  -- success
6122 *              none-zero failure code
6123 *==========================================================================*/
6124int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6125{
6126    int rc = 0;
6127    mm_camera_vtbl_t *cameraHandle = NULL;
6128    QCamera3HeapMemory *capabilityHeap = NULL;
6129
6130    rc = camera_open((uint8_t)cameraId, &cameraHandle);
6131    if (rc) {
6132        LOGE("camera_open failed. rc = %d", rc);
6133        goto open_failed;
6134    }
6135    if (!cameraHandle) {
6136        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6137        goto open_failed;
6138    }
6139
6140    capabilityHeap = new QCamera3HeapMemory(1);
6141    if (capabilityHeap == NULL) {
6142        LOGE("creation of capabilityHeap failed");
6143        goto heap_creation_failed;
6144    }
6145    /* Allocate memory for capability buffer */
6146    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6147    if(rc != OK) {
6148        LOGE("No memory for cappability");
6149        goto allocate_failed;
6150    }
6151
6152    /* Map memory for capability buffer */
6153    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6154    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6155                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
6156                                capabilityHeap->getFd(0),
6157                                sizeof(cam_capability_t));
6158    if(rc < 0) {
6159        LOGE("failed to map capability buffer");
6160        goto map_failed;
6161    }
6162
6163    /* Query Capability */
6164    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6165    if(rc < 0) {
6166        LOGE("failed to query capability");
6167        goto query_failed;
6168    }
6169    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6170    if (!gCamCapability[cameraId]) {
6171        LOGE("out of memory");
6172        goto query_failed;
6173    }
6174    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6175                                        sizeof(cam_capability_t));
6176
6177    int index;
6178    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6179        cam_analysis_info_t *p_analysis_info =
6180                &gCamCapability[cameraId]->analysis_info[index];
6181        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6182        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6183    }
6184    rc = 0;
6185
6186query_failed:
6187    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6188                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
6189map_failed:
6190    capabilityHeap->deallocate();
6191allocate_failed:
6192    delete capabilityHeap;
6193heap_creation_failed:
6194    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6195    cameraHandle = NULL;
6196open_failed:
6197    return rc;
6198}
6199
6200/*==========================================================================
6201 * FUNCTION   : get3Aversion
6202 *
6203 * DESCRIPTION: get the Q3A S/W version
6204 *
6205 * PARAMETERS :
6206 *  @sw_version: Reference of Q3A structure which will hold version info upon
6207 *               return
6208 *
6209 * RETURN     : None
6210 *
6211 *==========================================================================*/
6212void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6213{
6214    if(gCamCapability[mCameraId])
6215        sw_version = gCamCapability[mCameraId]->q3a_version;
6216    else
6217        LOGE("Capability structure NULL!");
6218}
6219
6220
6221/*===========================================================================
6222 * FUNCTION   : initParameters
6223 *
6224 * DESCRIPTION: initialize camera parameters
6225 *
6226 * PARAMETERS :
6227 *
6228 * RETURN     : int32_t type of status
6229 *              NO_ERROR  -- success
6230 *              none-zero failure code
6231 *==========================================================================*/
6232int QCamera3HardwareInterface::initParameters()
6233{
6234    int rc = 0;
6235
6236    //Allocate Set Param Buffer
6237    mParamHeap = new QCamera3HeapMemory(1);
6238    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6239    if(rc != OK) {
6240        rc = NO_MEMORY;
6241        LOGE("Failed to allocate SETPARM Heap memory");
6242        delete mParamHeap;
6243        mParamHeap = NULL;
6244        return rc;
6245    }
6246
6247    //Map memory for parameters buffer
6248    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6249            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6250            mParamHeap->getFd(0),
6251            sizeof(metadata_buffer_t));
6252    if(rc < 0) {
6253        LOGE("failed to map SETPARM buffer");
6254        rc = FAILED_TRANSACTION;
6255        mParamHeap->deallocate();
6256        delete mParamHeap;
6257        mParamHeap = NULL;
6258        return rc;
6259    }
6260
6261    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6262
6263    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6264    return rc;
6265}
6266
6267/*===========================================================================
6268 * FUNCTION   : deinitParameters
6269 *
6270 * DESCRIPTION: de-initialize camera parameters
6271 *
6272 * PARAMETERS :
6273 *
6274 * RETURN     : NONE
6275 *==========================================================================*/
6276void QCamera3HardwareInterface::deinitParameters()
6277{
6278    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6279            CAM_MAPPING_BUF_TYPE_PARM_BUF);
6280
6281    mParamHeap->deallocate();
6282    delete mParamHeap;
6283    mParamHeap = NULL;
6284
6285    mParameters = NULL;
6286
6287    free(mPrevParameters);
6288    mPrevParameters = NULL;
6289}
6290
6291/*===========================================================================
6292 * FUNCTION   : calcMaxJpegSize
6293 *
6294 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6295 *
6296 * PARAMETERS :
6297 *
6298 * RETURN     : max_jpeg_size
6299 *==========================================================================*/
6300size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6301{
6302    size_t max_jpeg_size = 0;
6303    size_t temp_width, temp_height;
6304    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6305            MAX_SIZES_CNT);
6306    for (size_t i = 0; i < count; i++) {
6307        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6308        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6309        if (temp_width * temp_height > max_jpeg_size ) {
6310            max_jpeg_size = temp_width * temp_height;
6311        }
6312    }
6313    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6314    return max_jpeg_size;
6315}
6316
6317/*===========================================================================
6318 * FUNCTION   : getMaxRawSize
6319 *
6320 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6321 *
6322 * PARAMETERS :
6323 *
6324 * RETURN     : Largest supported Raw Dimension
6325 *==========================================================================*/
6326cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6327{
6328    int max_width = 0;
6329    cam_dimension_t maxRawSize;
6330
6331    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6332    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6333        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6334            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6335            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6336        }
6337    }
6338    return maxRawSize;
6339}
6340
6341
6342/*===========================================================================
6343 * FUNCTION   : calcMaxJpegDim
6344 *
6345 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6346 *
6347 * PARAMETERS :
6348 *
6349 * RETURN     : max_jpeg_dim
6350 *==========================================================================*/
6351cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6352{
6353    cam_dimension_t max_jpeg_dim;
6354    cam_dimension_t curr_jpeg_dim;
6355    max_jpeg_dim.width = 0;
6356    max_jpeg_dim.height = 0;
6357    curr_jpeg_dim.width = 0;
6358    curr_jpeg_dim.height = 0;
6359    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6360        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6361        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6362        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6363            max_jpeg_dim.width * max_jpeg_dim.height ) {
6364            max_jpeg_dim.width = curr_jpeg_dim.width;
6365            max_jpeg_dim.height = curr_jpeg_dim.height;
6366        }
6367    }
6368    return max_jpeg_dim;
6369}
6370
6371/*===========================================================================
6372 * FUNCTION   : addStreamConfig
6373 *
6374 * DESCRIPTION: adds the stream configuration to the array
6375 *
6376 * PARAMETERS :
6377 * @available_stream_configs : pointer to stream configuration array
6378 * @scalar_format            : scalar format
6379 * @dim                      : configuration dimension
6380 * @config_type              : input or output configuration type
6381 *
6382 * RETURN     : NONE
6383 *==========================================================================*/
6384void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6385        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6386{
6387    available_stream_configs.add(scalar_format);
6388    available_stream_configs.add(dim.width);
6389    available_stream_configs.add(dim.height);
6390    available_stream_configs.add(config_type);
6391}
6392
6393/*===========================================================================
6394 * FUNCTION   : suppportBurstCapture
6395 *
6396 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6397 *
6398 * PARAMETERS :
6399 *   @cameraId  : camera Id
6400 *
6401 * RETURN     : true if camera supports BURST_CAPTURE
6402 *              false otherwise
6403 *==========================================================================*/
6404bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6405{
6406    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6407    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6408    const int32_t highResWidth = 3264;
6409    const int32_t highResHeight = 2448;
6410
6411    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6412        // Maximum resolution images cannot be captured at >= 10fps
6413        // -> not supporting BURST_CAPTURE
6414        return false;
6415    }
6416
6417    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6418        // Maximum resolution images can be captured at >= 20fps
6419        // --> supporting BURST_CAPTURE
6420        return true;
6421    }
6422
6423    // Find the smallest highRes resolution, or largest resolution if there is none
6424    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6425            MAX_SIZES_CNT);
6426    size_t highRes = 0;
6427    while ((highRes + 1 < totalCnt) &&
6428            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6429            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6430            highResWidth * highResHeight)) {
6431        highRes++;
6432    }
6433    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6434        return true;
6435    } else {
6436        return false;
6437    }
6438}
6439
6440/*===========================================================================
6441 * FUNCTION   : initStaticMetadata
6442 *
6443 * DESCRIPTION: initialize the static metadata
6444 *
6445 * PARAMETERS :
6446 *   @cameraId  : camera Id
6447 *
6448 * RETURN     : int32_t type of status
6449 *              0  -- success
6450 *              non-zero failure code
6451 *==========================================================================*/
6452int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6453{
6454    int rc = 0;
6455    CameraMetadata staticInfo;
6456    size_t count = 0;
6457    bool limitedDevice = false;
6458    char prop[PROPERTY_VALUE_MAX];
6459    bool supportBurst = false;
6460
6461    supportBurst = supportBurstCapture(cameraId);
6462
6463    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6464     * guaranteed or if min fps of max resolution is less than 20 fps, its
6465     * advertised as limited device*/
6466    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6467            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6468            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6469            !supportBurst;
6470
6471    uint8_t supportedHwLvl = limitedDevice ?
6472            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6473            // LEVEL_3 - This device will support level 3.
6474            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6475
6476    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6477            &supportedHwLvl, 1);
6478
6479    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6480    /*HAL 3 only*/
6481    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6482                    &gCamCapability[cameraId]->min_focus_distance, 1);
6483
6484    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6485                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6486
6487    /*should be using focal lengths but sensor doesn't provide that info now*/
6488    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6489                      &gCamCapability[cameraId]->focal_length,
6490                      1);
6491
6492    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6493            gCamCapability[cameraId]->apertures,
6494            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6495
6496    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6497            gCamCapability[cameraId]->filter_densities,
6498            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6499
6500
6501    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6502            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6503            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6504
6505    int32_t lens_shading_map_size[] = {
6506            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6507            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6508    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6509                      lens_shading_map_size,
6510                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6511
6512    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6513            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6514
6515    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6516            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6517
6518    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6519            &gCamCapability[cameraId]->max_frame_duration, 1);
6520
6521    camera_metadata_rational baseGainFactor = {
6522            gCamCapability[cameraId]->base_gain_factor.numerator,
6523            gCamCapability[cameraId]->base_gain_factor.denominator};
6524    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6525                      &baseGainFactor, 1);
6526
6527    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6528                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6529
6530    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6531            gCamCapability[cameraId]->pixel_array_size.height};
6532    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6533                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6534
6535    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6536            gCamCapability[cameraId]->active_array_size.top,
6537            gCamCapability[cameraId]->active_array_size.width,
6538            gCamCapability[cameraId]->active_array_size.height};
6539    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6540            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6541
6542    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6543            &gCamCapability[cameraId]->white_level, 1);
6544
6545    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6546            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6547
6548    bool hasBlackRegions = false;
6549    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6550        LOGW("black_region_count: %d is bounded to %d",
6551            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6552        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6553    }
6554    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6555        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6556        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6557            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6558        }
6559        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6560                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6561        hasBlackRegions = true;
6562    }
6563
6564    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6565            &gCamCapability[cameraId]->flash_charge_duration, 1);
6566
6567    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6568            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6569
6570    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
6571    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6572            &timestampSource, 1);
6573
6574    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6575            &gCamCapability[cameraId]->histogram_size, 1);
6576
6577    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6578            &gCamCapability[cameraId]->max_histogram_count, 1);
6579
6580    int32_t sharpness_map_size[] = {
6581            gCamCapability[cameraId]->sharpness_map_size.width,
6582            gCamCapability[cameraId]->sharpness_map_size.height};
6583
6584    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6585            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6586
6587    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6588            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6589
6590    int32_t scalar_formats[] = {
6591            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6592            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6593            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6594            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6595            HAL_PIXEL_FORMAT_RAW10,
6596            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6597    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6598    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6599                      scalar_formats,
6600                      scalar_formats_count);
6601
6602    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6603    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6604    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6605            count, MAX_SIZES_CNT, available_processed_sizes);
6606    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6607            available_processed_sizes, count * 2);
6608
6609    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6610    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6611    makeTable(gCamCapability[cameraId]->raw_dim,
6612            count, MAX_SIZES_CNT, available_raw_sizes);
6613    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6614            available_raw_sizes, count * 2);
6615
6616    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6617    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6618    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6619            count, MAX_SIZES_CNT, available_fps_ranges);
6620    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6621            available_fps_ranges, count * 2);
6622
6623    camera_metadata_rational exposureCompensationStep = {
6624            gCamCapability[cameraId]->exp_compensation_step.numerator,
6625            gCamCapability[cameraId]->exp_compensation_step.denominator};
6626    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6627                      &exposureCompensationStep, 1);
6628
6629    Vector<uint8_t> availableVstabModes;
6630    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6631    char eis_prop[PROPERTY_VALUE_MAX];
6632    memset(eis_prop, 0, sizeof(eis_prop));
6633    property_get("persist.camera.eis.enable", eis_prop, "0");
6634    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6635    if (facingBack && eis_prop_set) {
6636        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6637    }
6638    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6639                      availableVstabModes.array(), availableVstabModes.size());
6640
6641    /*HAL 1 and HAL 3 common*/
6642    float maxZoom = 4;
6643    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6644            &maxZoom, 1);
6645
6646    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6647    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6648
6649    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6650    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6651        max3aRegions[2] = 0; /* AF not supported */
6652    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6653            max3aRegions, 3);
6654
6655    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6656    memset(prop, 0, sizeof(prop));
6657    property_get("persist.camera.facedetect", prop, "1");
6658    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6659    LOGD("Support face detection mode: %d",
6660             supportedFaceDetectMode);
6661
6662    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6663    Vector<uint8_t> availableFaceDetectModes;
6664    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6665    if (supportedFaceDetectMode == 1) {
6666        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6667    } else if (supportedFaceDetectMode == 2) {
6668        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6669    } else if (supportedFaceDetectMode == 3) {
6670        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6671        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6672    } else {
6673        maxFaces = 0;
6674    }
6675    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6676            availableFaceDetectModes.array(),
6677            availableFaceDetectModes.size());
6678    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6679            (int32_t *)&maxFaces, 1);
6680
6681    int32_t exposureCompensationRange[] = {
6682            gCamCapability[cameraId]->exposure_compensation_min,
6683            gCamCapability[cameraId]->exposure_compensation_max};
6684    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6685            exposureCompensationRange,
6686            sizeof(exposureCompensationRange)/sizeof(int32_t));
6687
6688    uint8_t lensFacing = (facingBack) ?
6689            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6690    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6691
6692    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6693                      available_thumbnail_sizes,
6694                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6695
6696    /*all sizes will be clubbed into this tag*/
6697    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6698    /*android.scaler.availableStreamConfigurations*/
6699    Vector<int32_t> available_stream_configs;
6700    cam_dimension_t active_array_dim;
6701    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6702    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6703    /* Add input/output stream configurations for each scalar formats*/
6704    for (size_t j = 0; j < scalar_formats_count; j++) {
6705        switch (scalar_formats[j]) {
6706        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6707        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6708        case HAL_PIXEL_FORMAT_RAW10:
6709            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6710                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6711                addStreamConfig(available_stream_configs, scalar_formats[j],
6712                        gCamCapability[cameraId]->raw_dim[i],
6713                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6714            }
6715            break;
6716        case HAL_PIXEL_FORMAT_BLOB:
6717            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6718                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6719                addStreamConfig(available_stream_configs, scalar_formats[j],
6720                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6721                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6722            }
6723            break;
6724        case HAL_PIXEL_FORMAT_YCbCr_420_888:
6725        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6726        default:
6727            cam_dimension_t largest_picture_size;
6728            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6729            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6730                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6731                addStreamConfig(available_stream_configs, scalar_formats[j],
6732                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6733                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6734                /* Book keep largest */
6735                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6736                        >= largest_picture_size.width &&
6737                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
6738                        >= largest_picture_size.height)
6739                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6740            }
6741            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
6742            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6743                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6744                 addStreamConfig(available_stream_configs, scalar_formats[j],
6745                         largest_picture_size,
6746                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6747            }
6748            break;
6749        }
6750    }
6751
6752    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6753                      available_stream_configs.array(), available_stream_configs.size());
6754    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6755    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6756
6757    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6758    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6759
6760    /* android.scaler.availableMinFrameDurations */
6761    Vector<int64_t> available_min_durations;
6762    for (size_t j = 0; j < scalar_formats_count; j++) {
6763        switch (scalar_formats[j]) {
6764        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6765        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6766        case HAL_PIXEL_FORMAT_RAW10:
6767            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6768                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6769                available_min_durations.add(scalar_formats[j]);
6770                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
6771                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
6772                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
6773            }
6774            break;
6775        default:
6776            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6777                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6778                available_min_durations.add(scalar_formats[j]);
6779                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
6780                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
6781                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
6782            }
6783            break;
6784        }
6785    }
6786    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6787                      available_min_durations.array(), available_min_durations.size());
6788
6789    Vector<int32_t> available_hfr_configs;
6790    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
6791        int32_t fps = 0;
6792        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6793        case CAM_HFR_MODE_60FPS:
6794            fps = 60;
6795            break;
6796        case CAM_HFR_MODE_90FPS:
6797            fps = 90;
6798            break;
6799        case CAM_HFR_MODE_120FPS:
6800            fps = 120;
6801            break;
6802        case CAM_HFR_MODE_150FPS:
6803            fps = 150;
6804            break;
6805        case CAM_HFR_MODE_180FPS:
6806            fps = 180;
6807            break;
6808        case CAM_HFR_MODE_210FPS:
6809            fps = 210;
6810            break;
6811        case CAM_HFR_MODE_240FPS:
6812            fps = 240;
6813            break;
6814        case CAM_HFR_MODE_480FPS:
6815            fps = 480;
6816            break;
6817        case CAM_HFR_MODE_OFF:
6818        case CAM_HFR_MODE_MAX:
6819        default:
6820            break;
6821        }
6822
6823        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6824        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6825            /* For each HFR frame rate, need to advertise one variable fps range
6826             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
6827             * and [120, 120]. While camcorder preview alone is running [30, 120] is
6828             * set by the app. When video recording is started, [120, 120] is
6829             * set. This way sensor configuration does not change when recording
6830             * is started */
6831
6832            /* (width, height, fps_min, fps_max, batch_size_max) */
6833            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
6834                j < MAX_SIZES_CNT; j++) {
6835                available_hfr_configs.add(
6836                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
6837                available_hfr_configs.add(
6838                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
6839                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6840                available_hfr_configs.add(fps);
6841                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6842
6843                /* (width, height, fps_min, fps_max, batch_size_max) */
6844                available_hfr_configs.add(
6845                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
6846                available_hfr_configs.add(
6847                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
6848                available_hfr_configs.add(fps);
6849                available_hfr_configs.add(fps);
6850                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6851            }
6852       }
6853    }
6854    //Advertise HFR capability only if the property is set
6855    memset(prop, 0, sizeof(prop));
6856    property_get("persist.camera.hal3hfr.enable", prop, "1");
6857    uint8_t hfrEnable = (uint8_t)atoi(prop);
6858
6859    if(hfrEnable && available_hfr_configs.array()) {
6860        staticInfo.update(
6861                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6862                available_hfr_configs.array(), available_hfr_configs.size());
6863    }
6864
6865    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6866    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6867                      &max_jpeg_size, 1);
6868
6869    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6870    size_t size = 0;
6871    count = CAM_EFFECT_MODE_MAX;
6872    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6873    for (size_t i = 0; i < count; i++) {
6874        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6875                gCamCapability[cameraId]->supported_effects[i]);
6876        if (NAME_NOT_FOUND != val) {
6877            avail_effects[size] = (uint8_t)val;
6878            size++;
6879        }
6880    }
6881    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6882                      avail_effects,
6883                      size);
6884
6885    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6886    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6887    size_t supported_scene_modes_cnt = 0;
6888    count = CAM_SCENE_MODE_MAX;
6889    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6890    for (size_t i = 0; i < count; i++) {
6891        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6892                CAM_SCENE_MODE_OFF) {
6893            int val = lookupFwkName(SCENE_MODES_MAP,
6894                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
6895                    gCamCapability[cameraId]->supported_scene_modes[i]);
6896            if (NAME_NOT_FOUND != val) {
6897                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6898                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6899                supported_scene_modes_cnt++;
6900            }
6901        }
6902    }
6903    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6904                      avail_scene_modes,
6905                      supported_scene_modes_cnt);
6906
6907    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6908    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6909                      supported_scene_modes_cnt,
6910                      CAM_SCENE_MODE_MAX,
6911                      scene_mode_overrides,
6912                      supported_indexes,
6913                      cameraId);
6914
6915    if (supported_scene_modes_cnt == 0) {
6916        supported_scene_modes_cnt = 1;
6917        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6918    }
6919
6920    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6921            scene_mode_overrides, supported_scene_modes_cnt * 3);
6922
6923    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6924                                         ANDROID_CONTROL_MODE_AUTO,
6925                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6926    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6927            available_control_modes,
6928            3);
6929
6930    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6931    size = 0;
6932    count = CAM_ANTIBANDING_MODE_MAX;
6933    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6934    for (size_t i = 0; i < count; i++) {
6935        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6936                gCamCapability[cameraId]->supported_antibandings[i]);
6937        if (NAME_NOT_FOUND != val) {
6938            avail_antibanding_modes[size] = (uint8_t)val;
6939            size++;
6940        }
6941
6942    }
6943    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6944                      avail_antibanding_modes,
6945                      size);
6946
6947    uint8_t avail_abberation_modes[] = {
6948            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
6949            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
6950            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
6951    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6952    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6953    if (0 == count) {
6954        //  If no aberration correction modes are available for a device, this advertise OFF mode
6955        size = 1;
6956    } else {
6957        // If count is not zero then atleast one among the FAST or HIGH quality is supported
6958        // So, advertize all 3 modes if atleast any one mode is supported as per the
6959        // new M requirement
6960        size = 3;
6961    }
6962    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6963            avail_abberation_modes,
6964            size);
6965
6966    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6967    size = 0;
6968    count = CAM_FOCUS_MODE_MAX;
6969    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6970    for (size_t i = 0; i < count; i++) {
6971        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6972                gCamCapability[cameraId]->supported_focus_modes[i]);
6973        if (NAME_NOT_FOUND != val) {
6974            avail_af_modes[size] = (uint8_t)val;
6975            size++;
6976        }
6977    }
6978    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6979                      avail_af_modes,
6980                      size);
6981
6982    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6983    size = 0;
6984    count = CAM_WB_MODE_MAX;
6985    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6986    for (size_t i = 0; i < count; i++) {
6987        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6988                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6989                gCamCapability[cameraId]->supported_white_balances[i]);
6990        if (NAME_NOT_FOUND != val) {
6991            avail_awb_modes[size] = (uint8_t)val;
6992            size++;
6993        }
6994    }
6995    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6996                      avail_awb_modes,
6997                      size);
6998
6999    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7000    count = CAM_FLASH_FIRING_LEVEL_MAX;
7001    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7002            count);
7003    for (size_t i = 0; i < count; i++) {
7004        available_flash_levels[i] =
7005                gCamCapability[cameraId]->supported_firing_levels[i];
7006    }
7007    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7008            available_flash_levels, count);
7009
7010    uint8_t flashAvailable;
7011    if (gCamCapability[cameraId]->flash_available)
7012        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7013    else
7014        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7015    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7016            &flashAvailable, 1);
7017
7018    Vector<uint8_t> avail_ae_modes;
7019    count = CAM_AE_MODE_MAX;
7020    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
7021    for (size_t i = 0; i < count; i++) {
7022        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
7023    }
7024    if (flashAvailable) {
7025        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
7026        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
7027    }
7028    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
7029                      avail_ae_modes.array(),
7030                      avail_ae_modes.size());
7031
7032    int32_t sensitivity_range[2];
7033    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
7034    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
7035    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
7036                      sensitivity_range,
7037                      sizeof(sensitivity_range) / sizeof(int32_t));
7038
7039    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7040                      &gCamCapability[cameraId]->max_analog_sensitivity,
7041                      1);
7042
7043    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
7044    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
7045                      &sensor_orientation,
7046                      1);
7047
7048    int32_t max_output_streams[] = {
7049            MAX_STALLING_STREAMS,
7050            MAX_PROCESSED_STREAMS,
7051            MAX_RAW_STREAMS};
7052    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
7053            max_output_streams,
7054            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
7055
7056    uint8_t avail_leds = 0;
7057    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
7058                      &avail_leds, 0);
7059
7060    uint8_t focus_dist_calibrated;
7061    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
7062            gCamCapability[cameraId]->focus_dist_calibrated);
7063    if (NAME_NOT_FOUND != val) {
7064        focus_dist_calibrated = (uint8_t)val;
7065        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7066                     &focus_dist_calibrated, 1);
7067    }
7068
7069    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7070    size = 0;
7071    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7072            MAX_TEST_PATTERN_CNT);
7073    for (size_t i = 0; i < count; i++) {
7074        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7075                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7076        if (NAME_NOT_FOUND != testpatternMode) {
7077            avail_testpattern_modes[size] = testpatternMode;
7078            size++;
7079        }
7080    }
7081    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7082                      avail_testpattern_modes,
7083                      size);
7084
7085    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
7086    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
7087                      &max_pipeline_depth,
7088                      1);
7089
7090    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
7091    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7092                      &partial_result_count,
7093                       1);
7094
7095    int32_t max_stall_duration = MAX_REPROCESS_STALL;
7096    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7097
7098    Vector<uint8_t> available_capabilities;
7099    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
7100    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
7101    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
7102    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
7103    if (supportBurst) {
7104        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
7105    }
7106    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
7107    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
7108    if (hfrEnable && available_hfr_configs.array()) {
7109        available_capabilities.add(
7110                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
7111    }
7112
7113    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
7114        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
7115    }
7116    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7117            available_capabilities.array(),
7118            available_capabilities.size());
7119
7120    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
7121    //Assumption is that all bayer cameras support MANUAL_SENSOR.
7122    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7123            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7124
7125    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7126            &aeLockAvailable, 1);
7127
7128    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
7129    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7130    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7131            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7132
7133    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7134            &awbLockAvailable, 1);
7135
7136    int32_t max_input_streams = 1;
7137    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7138                      &max_input_streams,
7139                      1);
7140
7141    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
7142    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
7143            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
7144            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
7145            HAL_PIXEL_FORMAT_YCbCr_420_888};
7146    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7147                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
7148
7149    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
7150    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
7151                      &max_latency,
7152                      1);
7153
7154    int32_t isp_sensitivity_range[2];
7155    isp_sensitivity_range[0] =
7156        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
7157    isp_sensitivity_range[1] =
7158        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
7159    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7160                      isp_sensitivity_range,
7161                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7162
7163    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7164                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7165    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7166            available_hot_pixel_modes,
7167            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7168
7169    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7170                                         ANDROID_SHADING_MODE_FAST,
7171                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
7172    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7173                      available_shading_modes,
7174                      3);
7175
7176    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7177                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7178    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7179                      available_lens_shading_map_modes,
7180                      2);
7181
7182    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
7183                                      ANDROID_EDGE_MODE_FAST,
7184                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
7185                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
7186    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7187            available_edge_modes,
7188            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
7189
7190    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
7191                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
7192                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
7193                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
7194                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
7195    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7196            available_noise_red_modes,
7197            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
7198
7199    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
7200                                         ANDROID_TONEMAP_MODE_FAST,
7201                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7202    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7203            available_tonemap_modes,
7204            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7205
7206    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7207    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7208            available_hot_pixel_map_modes,
7209            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7210
7211    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7212            gCamCapability[cameraId]->reference_illuminant1);
7213    if (NAME_NOT_FOUND != val) {
7214        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7215        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7216    }
7217
7218    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7219            gCamCapability[cameraId]->reference_illuminant2);
7220    if (NAME_NOT_FOUND != val) {
7221        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7222        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7223    }
7224
7225    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7226            (void *)gCamCapability[cameraId]->forward_matrix1,
7227            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7228
7229    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7230            (void *)gCamCapability[cameraId]->forward_matrix2,
7231            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7232
7233    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7234            (void *)gCamCapability[cameraId]->color_transform1,
7235            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7236
7237    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7238            (void *)gCamCapability[cameraId]->color_transform2,
7239            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7240
7241    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7242            (void *)gCamCapability[cameraId]->calibration_transform1,
7243            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7244
7245    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7246            (void *)gCamCapability[cameraId]->calibration_transform2,
7247            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7248
7249    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7250       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7251       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7252       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7253       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7254       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7255       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7256       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7257       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7258       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7259       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7260       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7261       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7262       ANDROID_JPEG_GPS_COORDINATES,
7263       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7264       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7265       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7266       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7267       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7268       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7269       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7270       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7271       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7272       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7273       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7274       ANDROID_STATISTICS_FACE_DETECT_MODE,
7275       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7276       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7277       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7278       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7279       /* DevCamDebug metadata request_keys_basic */
7280       DEVCAMDEBUG_META_ENABLE,
7281       /* DevCamDebug metadata end */
7282       };
7283
7284    size_t request_keys_cnt =
7285            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7286    Vector<int32_t> available_request_keys;
7287    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7288    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7289        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7290    }
7291
7292    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7293            available_request_keys.array(), available_request_keys.size());
7294
7295    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7296       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7297       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7298       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7299       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7300       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7301       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7302       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7303       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7304       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7305       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7306       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7307       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7308       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7309       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7310       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7311       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7312       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7313       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7314       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7315       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7316       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7317       ANDROID_STATISTICS_FACE_SCORES,
7318       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7319       /* DevCamDebug metadata result_keys_basic */
7320       DEVCAMDEBUG_META_ENABLE,
7321       DEVCAMDEBUG_AF_LENS_POSITION,
7322       /* DevCamDebug metadata end */
7323       };
7324    size_t result_keys_cnt =
7325            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7326
7327    Vector<int32_t> available_result_keys;
7328    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7329    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7330        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7331    }
7332    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7333        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7334        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7335    }
7336    if (supportedFaceDetectMode == 1) {
7337        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7338        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7339    } else if ((supportedFaceDetectMode == 2) ||
7340            (supportedFaceDetectMode == 3)) {
7341        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7342        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7343    }
7344    if (hasBlackRegions) {
7345        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
7346        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
7347    }
7348    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7349            available_result_keys.array(), available_result_keys.size());
7350
7351    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7352       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7353       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7354       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7355       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7356       ANDROID_SCALER_CROPPING_TYPE,
7357       ANDROID_SYNC_MAX_LATENCY,
7358       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7359       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7360       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7361       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7362       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7363       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7364       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7365       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7366       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7367       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7368       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7369       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7370       ANDROID_LENS_FACING,
7371       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7372       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7373       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7374       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7375       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7376       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7377       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7378       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7379       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7380       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7381       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7382       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7383       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7384       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7385       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7386       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7387       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7388       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7389       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7390       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7391       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7392       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7393       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7394       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7395       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7396       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7397       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7398       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7399       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7400       ANDROID_CONTROL_AVAILABLE_MODES,
7401       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7402       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7403       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7404       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7405       ANDROID_SHADING_AVAILABLE_MODES,
7406       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7407       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7408
7409    Vector<int32_t> available_characteristics_keys;
7410    available_characteristics_keys.appendArray(characteristics_keys_basic,
7411            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7412    if (hasBlackRegions) {
7413        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7414    }
7415    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7416                      available_characteristics_keys.array(),
7417                      available_characteristics_keys.size());
7418
7419    /*available stall durations depend on the hw + sw and will be different for different devices */
7420    /*have to add for raw after implementation*/
7421    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7422    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7423
7424    Vector<int64_t> available_stall_durations;
7425    for (uint32_t j = 0; j < stall_formats_count; j++) {
7426        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7427            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7428                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7429                available_stall_durations.add(stall_formats[j]);
7430                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7431                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7432                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7433          }
7434        } else {
7435            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7436                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7437                available_stall_durations.add(stall_formats[j]);
7438                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7439                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7440                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7441            }
7442        }
7443    }
7444    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7445                      available_stall_durations.array(),
7446                      available_stall_durations.size());
7447
7448    //QCAMERA3_OPAQUE_RAW
7449    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7450    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7451    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7452    case LEGACY_RAW:
7453        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7454            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7455        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7456            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7457        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7458            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7459        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7460        break;
7461    case MIPI_RAW:
7462        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7463            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7464        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7465            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7466        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7467            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7468        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7469        break;
7470    default:
7471        LOGE("unknown opaque_raw_format %d",
7472                gCamCapability[cameraId]->opaque_raw_fmt);
7473        break;
7474    }
7475    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7476
7477    Vector<int32_t> strides;
7478    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7479            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7480        cam_stream_buf_plane_info_t buf_planes;
7481        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7482        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7483        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7484            &gCamCapability[cameraId]->padding_info, &buf_planes);
7485        strides.add(buf_planes.plane_info.mp[0].stride);
7486    }
7487    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7488            strides.size());
7489
7490    Vector<int32_t> opaque_size;
7491    for (size_t j = 0; j < scalar_formats_count; j++) {
7492        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7493            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7494                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7495                cam_stream_buf_plane_info_t buf_planes;
7496
7497                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7498                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7499
7500                if (rc == 0) {
7501                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7502                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7503                    opaque_size.add(buf_planes.plane_info.frame_len);
7504                }else {
7505                    LOGE("raw frame calculation failed!");
7506                }
7507            }
7508        }
7509    }
7510
7511    if ((opaque_size.size() > 0) &&
7512            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7513        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7514    else
7515        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7516
7517    gStaticMetadata[cameraId] = staticInfo.release();
7518    return rc;
7519}
7520
7521/*===========================================================================
7522 * FUNCTION   : makeTable
7523 *
7524 * DESCRIPTION: make a table of sizes
7525 *
7526 * PARAMETERS :
7527 *
7528 *
7529 *==========================================================================*/
7530void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7531        size_t max_size, int32_t *sizeTable)
7532{
7533    size_t j = 0;
7534    if (size > max_size) {
7535       size = max_size;
7536    }
7537    for (size_t i = 0; i < size; i++) {
7538        sizeTable[j] = dimTable[i].width;
7539        sizeTable[j+1] = dimTable[i].height;
7540        j+=2;
7541    }
7542}
7543
7544/*===========================================================================
7545 * FUNCTION   : makeFPSTable
7546 *
7547 * DESCRIPTION: make a table of fps ranges
7548 *
7549 * PARAMETERS :
7550 *
7551 *==========================================================================*/
7552void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7553        size_t max_size, int32_t *fpsRangesTable)
7554{
7555    size_t j = 0;
7556    if (size > max_size) {
7557       size = max_size;
7558    }
7559    for (size_t i = 0; i < size; i++) {
7560        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7561        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7562        j+=2;
7563    }
7564}
7565
7566/*===========================================================================
7567 * FUNCTION   : makeOverridesList
7568 *
7569 * DESCRIPTION: make a list of scene mode overrides
7570 *
7571 * PARAMETERS :
7572 *
7573 *
7574 *==========================================================================*/
7575void QCamera3HardwareInterface::makeOverridesList(
7576        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
7577        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
7578{
7579    /*daemon will give a list of overrides for all scene modes.
7580      However we should send the fwk only the overrides for the scene modes
7581      supported by the framework*/
7582    size_t j = 0;
7583    if (size > max_size) {
7584       size = max_size;
7585    }
7586    size_t focus_count = CAM_FOCUS_MODE_MAX;
7587    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
7588            focus_count);
7589    for (size_t i = 0; i < size; i++) {
7590        bool supt = false;
7591        size_t index = supported_indexes[i];
7592        overridesList[j] = gCamCapability[camera_id]->flash_available ?
7593                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
7594        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7595                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7596                overridesTable[index].awb_mode);
7597        if (NAME_NOT_FOUND != val) {
7598            overridesList[j+1] = (uint8_t)val;
7599        }
7600        uint8_t focus_override = overridesTable[index].af_mode;
7601        for (size_t k = 0; k < focus_count; k++) {
7602           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
7603              supt = true;
7604              break;
7605           }
7606        }
7607        if (supt) {
7608            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7609                    focus_override);
7610            if (NAME_NOT_FOUND != val) {
7611                overridesList[j+2] = (uint8_t)val;
7612            }
7613        } else {
7614           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
7615        }
7616        j+=3;
7617    }
7618}
7619
7620/*===========================================================================
7621 * FUNCTION   : filterJpegSizes
7622 *
7623 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7624 *              could be downscaled to
7625 *
7626 * PARAMETERS :
7627 *
7628 * RETURN     : length of jpegSizes array
7629 *==========================================================================*/
7630
7631size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7632        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7633        uint8_t downscale_factor)
7634{
7635    if (0 == downscale_factor) {
7636        downscale_factor = 1;
7637    }
7638
7639    int32_t min_width = active_array_size.width / downscale_factor;
7640    int32_t min_height = active_array_size.height / downscale_factor;
7641    size_t jpegSizesCnt = 0;
7642    if (processedSizesCnt > maxCount) {
7643        processedSizesCnt = maxCount;
7644    }
7645    for (size_t i = 0; i < processedSizesCnt; i+=2) {
7646        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7647            jpegSizes[jpegSizesCnt] = processedSizes[i];
7648            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7649            jpegSizesCnt += 2;
7650        }
7651    }
7652    return jpegSizesCnt;
7653}
7654
7655/*===========================================================================
7656 * FUNCTION   : computeNoiseModelEntryS
7657 *
7658 * DESCRIPTION: function to map a given sensitivity to the S noise
7659 *              model parameters in the DNG noise model.
7660 *
7661 * PARAMETERS : sens : the sensor sensitivity
7662 *
7663 * RETURN     : S (sensor amplification) noise
7664 *
7665 *==========================================================================*/
7666double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7667    double s = gCamCapability[mCameraId]->gradient_S * sens +
7668            gCamCapability[mCameraId]->offset_S;
7669    return ((s < 0.0) ? 0.0 : s);
7670}
7671
7672/*===========================================================================
7673 * FUNCTION   : computeNoiseModelEntryO
7674 *
7675 * DESCRIPTION: function to map a given sensitivity to the O noise
7676 *              model parameters in the DNG noise model.
7677 *
7678 * PARAMETERS : sens : the sensor sensitivity
7679 *
7680 * RETURN     : O (sensor readout) noise
7681 *
7682 *==========================================================================*/
7683double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7684    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7685    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7686            1.0 : (1.0 * sens / max_analog_sens);
7687    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7688            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7689    return ((o < 0.0) ? 0.0 : o);
7690}
7691
7692/*===========================================================================
7693 * FUNCTION   : getSensorSensitivity
7694 *
7695 * DESCRIPTION: convert iso_mode to an integer value
7696 *
7697 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7698 *
7699 * RETURN     : sensitivity supported by sensor
7700 *
7701 *==========================================================================*/
7702int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7703{
7704    int32_t sensitivity;
7705
7706    switch (iso_mode) {
7707    case CAM_ISO_MODE_100:
7708        sensitivity = 100;
7709        break;
7710    case CAM_ISO_MODE_200:
7711        sensitivity = 200;
7712        break;
7713    case CAM_ISO_MODE_400:
7714        sensitivity = 400;
7715        break;
7716    case CAM_ISO_MODE_800:
7717        sensitivity = 800;
7718        break;
7719    case CAM_ISO_MODE_1600:
7720        sensitivity = 1600;
7721        break;
7722    default:
7723        sensitivity = -1;
7724        break;
7725    }
7726    return sensitivity;
7727}
7728
7729/*===========================================================================
7730 * FUNCTION   : getCamInfo
7731 *
7732 * DESCRIPTION: query camera capabilities
7733 *
7734 * PARAMETERS :
7735 *   @cameraId  : camera Id
7736 *   @info      : camera info struct to be filled in with camera capabilities
7737 *
7738 * RETURN     : int type of status
7739 *              NO_ERROR  -- success
7740 *              non-zero failure code
7741 *==========================================================================*/
7742int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
7743        struct camera_info *info)
7744{
7745    ATRACE_CALL();
7746    int rc = 0;
7747
7748    pthread_mutex_lock(&gCamLock);
7749    if (NULL == gCamCapability[cameraId]) {
7750        rc = initCapabilities(cameraId);
7751        if (rc < 0) {
7752            pthread_mutex_unlock(&gCamLock);
7753            return rc;
7754        }
7755    }
7756
7757    if (NULL == gStaticMetadata[cameraId]) {
7758        rc = initStaticMetadata(cameraId);
7759        if (rc < 0) {
7760            pthread_mutex_unlock(&gCamLock);
7761            return rc;
7762        }
7763    }
7764
7765    switch(gCamCapability[cameraId]->position) {
7766    case CAM_POSITION_BACK:
7767        info->facing = CAMERA_FACING_BACK;
7768        break;
7769
7770    case CAM_POSITION_FRONT:
7771        info->facing = CAMERA_FACING_FRONT;
7772        break;
7773
7774    default:
7775        LOGE("Unknown position type for camera id:%d", cameraId);
7776        rc = -1;
7777        break;
7778    }
7779
7780
7781    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
7782    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
7783    info->static_camera_characteristics = gStaticMetadata[cameraId];
7784
7785    //For now assume both cameras can operate independently.
7786    info->conflicting_devices = NULL;
7787    info->conflicting_devices_length = 0;
7788
7789    //resource cost is 100 * MIN(1.0, m/M),
7790    //where m is throughput requirement with maximum stream configuration
7791    //and M is CPP maximum throughput.
7792    float max_fps = 0.0;
7793    for (uint32_t i = 0;
7794            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
7795        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
7796            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
7797    }
7798    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
7799            gCamCapability[cameraId]->active_array_size.width *
7800            gCamCapability[cameraId]->active_array_size.height * max_fps /
7801            gCamCapability[cameraId]->max_pixel_bandwidth;
7802    info->resource_cost = 100 * MIN(1.0, ratio);
7803    LOGI("camera %d resource cost is %d", cameraId,
7804            info->resource_cost);
7805
7806    pthread_mutex_unlock(&gCamLock);
7807    return rc;
7808}
7809
7810/*===========================================================================
7811 * FUNCTION   : translateCapabilityToMetadata
7812 *
7813 * DESCRIPTION: translate the capability into camera_metadata_t
7814 *
7815 * PARAMETERS : type of the request
7816 *
7817 *
7818 * RETURN     : success: camera_metadata_t*
7819 *              failure: NULL
7820 *
7821 *==========================================================================*/
7822camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7823{
7824    if (mDefaultMetadata[type] != NULL) {
7825        return mDefaultMetadata[type];
7826    }
7827    //first time we are handling this request
7828    //fill up the metadata structure using the wrapper class
7829    CameraMetadata settings;
7830    //translate from cam_capability_t to camera_metadata_tag_t
7831    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7832    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7833    int32_t defaultRequestID = 0;
7834    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7835
7836    /* OIS disable */
7837    char ois_prop[PROPERTY_VALUE_MAX];
7838    memset(ois_prop, 0, sizeof(ois_prop));
7839    property_get("persist.camera.ois.disable", ois_prop, "0");
7840    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7841
7842    /* Force video to use OIS */
7843    char videoOisProp[PROPERTY_VALUE_MAX];
7844    memset(videoOisProp, 0, sizeof(videoOisProp));
7845    property_get("persist.camera.ois.video", videoOisProp, "1");
7846    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7847
7848    // EIS enable/disable
7849    char eis_prop[PROPERTY_VALUE_MAX];
7850    memset(eis_prop, 0, sizeof(eis_prop));
7851    property_get("persist.camera.eis.enable", eis_prop, "0");
7852    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7853
7854    // Hybrid AE enable/disable
7855    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
7856    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
7857    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
7858    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
7859
7860    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7861    // This is a bit hacky. EIS is enabled only when the above setprop
7862    // is set to non-zero value and on back camera (for 2015 Nexus).
7863    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7864    // configureStream is called before this function. In other words,
7865    // we cannot guarantee the app will call configureStream before
7866    // calling createDefaultRequest.
7867    const bool eisEnabled = facingBack && eis_prop_set;
7868
7869    uint8_t controlIntent = 0;
7870    uint8_t focusMode;
7871    uint8_t vsMode;
7872    uint8_t optStabMode;
7873    uint8_t cacMode;
7874    uint8_t edge_mode;
7875    uint8_t noise_red_mode;
7876    uint8_t tonemap_mode;
7877    bool highQualityModeEntryAvailable = FALSE;
7878    bool fastModeEntryAvailable = FALSE;
7879    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7880    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7881    switch (type) {
7882      case CAMERA3_TEMPLATE_PREVIEW:
7883        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7884        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7885        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7886        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7887        edge_mode = ANDROID_EDGE_MODE_FAST;
7888        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7889        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7890        break;
7891      case CAMERA3_TEMPLATE_STILL_CAPTURE:
7892        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7893        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7894        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7895        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7896        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7897        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7898        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7899        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
7900        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
7901            if (gCamCapability[mCameraId]->aberration_modes[i] ==
7902                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
7903                highQualityModeEntryAvailable = TRUE;
7904            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
7905                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
7906                fastModeEntryAvailable = TRUE;
7907            }
7908        }
7909        if (highQualityModeEntryAvailable) {
7910            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7911        } else if (fastModeEntryAvailable) {
7912            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7913        }
7914        break;
7915      case CAMERA3_TEMPLATE_VIDEO_RECORD:
7916        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7917        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7918        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7919        if (eisEnabled) {
7920            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7921        }
7922        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7923        edge_mode = ANDROID_EDGE_MODE_FAST;
7924        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7925        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7926        if (forceVideoOis)
7927            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7928        break;
7929      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7930        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7931        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7932        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7933        if (eisEnabled) {
7934            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7935        }
7936        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7937        edge_mode = ANDROID_EDGE_MODE_FAST;
7938        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7939        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7940        if (forceVideoOis)
7941            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7942        break;
7943      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7944        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7945        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7946        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7947        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7948        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7949        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7950        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7951        break;
7952      case CAMERA3_TEMPLATE_MANUAL:
7953        edge_mode = ANDROID_EDGE_MODE_FAST;
7954        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7955        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7956        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7957        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7958        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7959        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7960        break;
7961      default:
7962        edge_mode = ANDROID_EDGE_MODE_FAST;
7963        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7964        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7965        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7966        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7967        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7968        break;
7969    }
7970    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7971    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7972    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7973    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7974        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7975    }
7976    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7977
7978    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7979            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7980        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7981    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7982            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7983            || ois_disable)
7984        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7985    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7986
7987    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7988            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7989
7990    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7991    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7992
7993    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7994    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7995
7996    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7997    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7998
7999    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8000    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8001
8002    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8003    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8004
8005    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8006    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8007
8008    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8009    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8010
8011    /*flash*/
8012    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8013    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8014
8015    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8016    settings.update(ANDROID_FLASH_FIRING_POWER,
8017            &flashFiringLevel, 1);
8018
8019    /* lens */
8020    float default_aperture = gCamCapability[mCameraId]->apertures[0];
8021    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8022
8023    if (gCamCapability[mCameraId]->filter_densities_count) {
8024        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8025        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8026                        gCamCapability[mCameraId]->filter_densities_count);
8027    }
8028
8029    float default_focal_length = gCamCapability[mCameraId]->focal_length;
8030    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8031
8032    float default_focus_distance = 0;
8033    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8034
8035    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8036    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8037
8038    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8039    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8040
8041    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8042    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8043
8044    /* face detection (default to OFF) */
8045    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8046    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8047
8048    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8049    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8050
8051    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8052    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8053
8054    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8055    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8056
8057    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8058    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8059
8060    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8061    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8062
8063    /* Exposure time(Update the Min Exposure Time)*/
8064    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8065    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8066
8067    /* frame duration */
8068    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8069    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8070
8071    /* sensitivity */
8072    static const int32_t default_sensitivity = 100;
8073    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8074    static const int32_t default_isp_sensitivity =
8075            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8076    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8077
8078    /*edge mode*/
8079    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8080
8081    /*noise reduction mode*/
8082    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8083
8084    /*color correction mode*/
8085    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8086    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8087
8088    /*transform matrix mode*/
8089    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8090
8091    int32_t scaler_crop_region[4];
8092    scaler_crop_region[0] = 0;
8093    scaler_crop_region[1] = 0;
8094    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8095    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8096    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8097
8098    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8099    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8100
8101    /*focus distance*/
8102    float focus_distance = 0.0;
8103    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8104
8105    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8106    /* Restrict default preview template to max 30 fps */
8107    float max_range = 0.0;
8108    float max_fixed_fps = 0.0;
8109    int32_t fps_range[2] = {0, 0};
8110    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8111            i++) {
8112        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8113                TEMPLATE_MAX_PREVIEW_FPS) {
8114            continue;
8115        }
8116        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8117            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8118        if (type == CAMERA3_TEMPLATE_PREVIEW ||
8119                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8120                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8121            if (range > max_range) {
8122                fps_range[0] =
8123                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8124                fps_range[1] =
8125                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8126                max_range = range;
8127            }
8128        } else {
8129            if (range < 0.01 && max_fixed_fps <
8130                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8131                fps_range[0] =
8132                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8133                fps_range[1] =
8134                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8135                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8136            }
8137        }
8138    }
8139    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8140
8141    /*precapture trigger*/
8142    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8143    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8144
8145    /*af trigger*/
8146    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8147    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8148
8149    /* ae & af regions */
8150    int32_t active_region[] = {
8151            gCamCapability[mCameraId]->active_array_size.left,
8152            gCamCapability[mCameraId]->active_array_size.top,
8153            gCamCapability[mCameraId]->active_array_size.left +
8154                    gCamCapability[mCameraId]->active_array_size.width,
8155            gCamCapability[mCameraId]->active_array_size.top +
8156                    gCamCapability[mCameraId]->active_array_size.height,
8157            0};
8158    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8159            sizeof(active_region) / sizeof(active_region[0]));
8160    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8161            sizeof(active_region) / sizeof(active_region[0]));
8162
8163    /* black level lock */
8164    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8165    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8166
8167    /* lens shading map mode */
8168    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8169    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8170        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8171    }
8172    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8173
8174    //special defaults for manual template
8175    if (type == CAMERA3_TEMPLATE_MANUAL) {
8176        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8177        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8178
8179        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8180        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8181
8182        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8183        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8184
8185        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8186        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8187
8188        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8189        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8190
8191        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8192        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8193    }
8194
8195
8196    /* TNR
8197     * We'll use this location to determine which modes TNR will be set.
8198     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8199     * This is not to be confused with linking on a per stream basis that decision
8200     * is still on per-session basis and will be handled as part of config stream
8201     */
8202    uint8_t tnr_enable = 0;
8203
8204    if (m_bTnrPreview || m_bTnrVideo) {
8205
8206        switch (type) {
8207            case CAMERA3_TEMPLATE_VIDEO_RECORD:
8208            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8209                    tnr_enable = 1;
8210                    break;
8211
8212            default:
8213                    tnr_enable = 0;
8214                    break;
8215        }
8216
8217        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8218        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8219        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8220
8221        LOGD("TNR:%d with process plate %d for template:%d",
8222                             tnr_enable, tnr_process_type, type);
8223    }
8224
8225    //Update Link tags to default
8226    int32_t sync_type = CAM_TYPE_STANDALONE;
8227    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8228
8229    int32_t is_main = 0; //this doesn't matter as app should overwrite
8230    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8231
8232    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8233
8234    /* CDS default */
8235    char prop[PROPERTY_VALUE_MAX];
8236    memset(prop, 0, sizeof(prop));
8237    property_get("persist.camera.CDS", prop, "Auto");
8238    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8239    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8240    if (CAM_CDS_MODE_MAX == cds_mode) {
8241        cds_mode = CAM_CDS_MODE_AUTO;
8242    }
8243
8244    /* Disabling CDS in templates which have TNR enabled*/
8245    if (tnr_enable)
8246        cds_mode = CAM_CDS_MODE_OFF;
8247
8248    int32_t mode = cds_mode;
8249    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8250
8251    /* hybrid ae */
8252    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8253
8254    mDefaultMetadata[type] = settings.release();
8255
8256    return mDefaultMetadata[type];
8257}
8258
8259/*===========================================================================
8260 * FUNCTION   : setFrameParameters
8261 *
8262 * DESCRIPTION: set parameters per frame as requested in the metadata from
8263 *              framework
8264 *
8265 * PARAMETERS :
8266 *   @request   : request that needs to be serviced
8267 *   @streamID : Stream ID of all the requested streams
8268 *   @blob_request: Whether this request is a blob request or not
8269 *
8270 * RETURN     : success: NO_ERROR
8271 *              failure:
8272 *==========================================================================*/
8273int QCamera3HardwareInterface::setFrameParameters(
8274                    camera3_capture_request_t *request,
8275                    cam_stream_ID_t streamID,
8276                    int blob_request,
8277                    uint32_t snapshotStreamId)
8278{
8279    /*translate from camera_metadata_t type to parm_type_t*/
8280    int rc = 0;
8281    int32_t hal_version = CAM_HAL_V3;
8282
8283    clear_metadata_buffer(mParameters);
8284    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8285        LOGE("Failed to set hal version in the parameters");
8286        return BAD_VALUE;
8287    }
8288
8289    /*we need to update the frame number in the parameters*/
8290    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8291            request->frame_number)) {
8292        LOGE("Failed to set the frame number in the parameters");
8293        return BAD_VALUE;
8294    }
8295
8296    /* Update stream id of all the requested buffers */
8297    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8298        LOGE("Failed to set stream type mask in the parameters");
8299        return BAD_VALUE;
8300    }
8301
8302    if (mUpdateDebugLevel) {
8303        uint32_t dummyDebugLevel = 0;
8304        /* The value of dummyDebugLevel is irrelavent. On
8305         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8306        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8307                dummyDebugLevel)) {
8308            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8309            return BAD_VALUE;
8310        }
8311        mUpdateDebugLevel = false;
8312    }
8313
8314    if(request->settings != NULL){
8315        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8316        if (blob_request)
8317            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8318    }
8319
8320    return rc;
8321}
8322
8323/*===========================================================================
8324 * FUNCTION   : setReprocParameters
8325 *
8326 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8327 *              return it.
8328 *
8329 * PARAMETERS :
8330 *   @request   : request that needs to be serviced
8331 *
8332 * RETURN     : success: NO_ERROR
8333 *              failure:
8334 *==========================================================================*/
8335int32_t QCamera3HardwareInterface::setReprocParameters(
8336        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8337        uint32_t snapshotStreamId)
8338{
8339    /*translate from camera_metadata_t type to parm_type_t*/
8340    int rc = 0;
8341
8342    if (NULL == request->settings){
8343        LOGE("Reprocess settings cannot be NULL");
8344        return BAD_VALUE;
8345    }
8346
8347    if (NULL == reprocParam) {
8348        LOGE("Invalid reprocessing metadata buffer");
8349        return BAD_VALUE;
8350    }
8351    clear_metadata_buffer(reprocParam);
8352
8353    /*we need to update the frame number in the parameters*/
8354    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8355            request->frame_number)) {
8356        LOGE("Failed to set the frame number in the parameters");
8357        return BAD_VALUE;
8358    }
8359
8360    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8361    if (rc < 0) {
8362        LOGE("Failed to translate reproc request");
8363        return rc;
8364    }
8365
8366    CameraMetadata frame_settings;
8367    frame_settings = request->settings;
8368    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8369            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8370        int32_t *crop_count =
8371                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8372        int32_t *crop_data =
8373                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8374        int32_t *roi_map =
8375                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8376        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8377            cam_crop_data_t crop_meta;
8378            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8379            crop_meta.num_of_streams = 1;
8380            crop_meta.crop_info[0].crop.left   = crop_data[0];
8381            crop_meta.crop_info[0].crop.top    = crop_data[1];
8382            crop_meta.crop_info[0].crop.width  = crop_data[2];
8383            crop_meta.crop_info[0].crop.height = crop_data[3];
8384
8385            crop_meta.crop_info[0].roi_map.left =
8386                    roi_map[0];
8387            crop_meta.crop_info[0].roi_map.top =
8388                    roi_map[1];
8389            crop_meta.crop_info[0].roi_map.width =
8390                    roi_map[2];
8391            crop_meta.crop_info[0].roi_map.height =
8392                    roi_map[3];
8393
8394            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8395                rc = BAD_VALUE;
8396            }
8397            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8398                    request->input_buffer->stream,
8399                    crop_meta.crop_info[0].crop.left,
8400                    crop_meta.crop_info[0].crop.top,
8401                    crop_meta.crop_info[0].crop.width,
8402                    crop_meta.crop_info[0].crop.height);
8403            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8404                    request->input_buffer->stream,
8405                    crop_meta.crop_info[0].roi_map.left,
8406                    crop_meta.crop_info[0].roi_map.top,
8407                    crop_meta.crop_info[0].roi_map.width,
8408                    crop_meta.crop_info[0].roi_map.height);
8409            } else {
8410                LOGE("Invalid reprocess crop count %d!", *crop_count);
8411            }
8412    } else {
8413        LOGE("No crop data from matching output stream");
8414    }
8415
8416    /* These settings are not needed for regular requests so handle them specially for
8417       reprocess requests; information needed for EXIF tags */
8418    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8419        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8420                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8421        if (NAME_NOT_FOUND != val) {
8422            uint32_t flashMode = (uint32_t)val;
8423            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8424                rc = BAD_VALUE;
8425            }
8426        } else {
8427            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8428                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8429        }
8430    } else {
8431        LOGH("No flash mode in reprocess settings");
8432    }
8433
8434    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8435        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8436        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8437            rc = BAD_VALUE;
8438        }
8439    } else {
8440        LOGH("No flash state in reprocess settings");
8441    }
8442
8443    return rc;
8444}
8445
8446/*===========================================================================
8447 * FUNCTION   : saveRequestSettings
8448 *
8449 * DESCRIPTION: Add any settings that might have changed to the request settings
8450 *              and save the settings to be applied on the frame
8451 *
8452 * PARAMETERS :
8453 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8454 *   @request      : request with initial settings
8455 *
8456 * RETURN     :
8457 * camera_metadata_t* : pointer to the saved request settings
8458 *==========================================================================*/
8459camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8460        const CameraMetadata &jpegMetadata,
8461        camera3_capture_request_t *request)
8462{
8463    camera_metadata_t *resultMetadata;
8464    CameraMetadata camMetadata;
8465    camMetadata = request->settings;
8466
8467    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8468        int32_t thumbnail_size[2];
8469        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8470        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8471        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8472                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8473    }
8474
8475    resultMetadata = camMetadata.release();
8476    return resultMetadata;
8477}
8478
8479/*===========================================================================
8480 * FUNCTION   : setHalFpsRange
8481 *
8482 * DESCRIPTION: set FPS range parameter
8483 *
8484 *
8485 * PARAMETERS :
8486 *   @settings    : Metadata from framework
8487 *   @hal_metadata: Metadata buffer
8488 *
8489 *
8490 * RETURN     : success: NO_ERROR
8491 *              failure:
8492 *==========================================================================*/
8493int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
8494        metadata_buffer_t *hal_metadata)
8495{
8496    int32_t rc = NO_ERROR;
8497    cam_fps_range_t fps_range;
8498    fps_range.min_fps = (float)
8499            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
8500    fps_range.max_fps = (float)
8501            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
8502    fps_range.video_min_fps = fps_range.min_fps;
8503    fps_range.video_max_fps = fps_range.max_fps;
8504
8505    LOGD("aeTargetFpsRange fps: [%f %f]",
8506            fps_range.min_fps, fps_range.max_fps);
8507    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
8508     * follows:
8509     * ---------------------------------------------------------------|
8510     *      Video stream is absent in configure_streams               |
8511     *    (Camcorder preview before the first video record            |
8512     * ---------------------------------------------------------------|
8513     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
8514     *                   |             |             | vid_min/max_fps|
8515     * ---------------------------------------------------------------|
8516     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
8517     *                   |-------------|-------------|----------------|
8518     *                   |  [240, 240] |     240     |  [240, 240]    |
8519     * ---------------------------------------------------------------|
8520     *     Video stream is present in configure_streams               |
8521     * ---------------------------------------------------------------|
8522     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
8523     *                   |             |             | vid_min/max_fps|
8524     * ---------------------------------------------------------------|
8525     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
8526     * (camcorder prev   |-------------|-------------|----------------|
8527     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
8528     *  is stopped)      |             |             |                |
8529     * ---------------------------------------------------------------|
8530     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
8531     *                   |-------------|-------------|----------------|
8532     *                   |  [240, 240] |     240     |  [240, 240]    |
8533     * ---------------------------------------------------------------|
8534     * When Video stream is absent in configure_streams,
8535     * preview fps = sensor_fps / batchsize
8536     * Eg: for 240fps at batchSize 4, preview = 60fps
8537     *     for 120fps at batchSize 4, preview = 30fps
8538     *
8539     * When video stream is present in configure_streams, preview fps is as per
8540     * the ratio of preview buffers to video buffers requested in process
8541     * capture request
8542     */
8543    mBatchSize = 0;
8544    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
8545        fps_range.min_fps = fps_range.video_max_fps;
8546        fps_range.video_min_fps = fps_range.video_max_fps;
8547        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
8548                fps_range.max_fps);
8549        if (NAME_NOT_FOUND != val) {
8550            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
8551            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
8552                return BAD_VALUE;
8553            }
8554
8555            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
8556                /* If batchmode is currently in progress and the fps changes,
8557                 * set the flag to restart the sensor */
8558                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
8559                        (mHFRVideoFps != fps_range.max_fps)) {
8560                    mNeedSensorRestart = true;
8561                }
8562                mHFRVideoFps = fps_range.max_fps;
8563                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
8564                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
8565                    mBatchSize = MAX_HFR_BATCH_SIZE;
8566                }
8567             }
8568            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
8569
8570         }
8571    } else {
8572        /* HFR mode is session param in backend/ISP. This should be reset when
8573         * in non-HFR mode  */
8574        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
8575        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
8576            return BAD_VALUE;
8577        }
8578    }
8579    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
8580        return BAD_VALUE;
8581    }
8582    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
8583            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
8584    return rc;
8585}
8586
8587/*===========================================================================
8588 * FUNCTION   : translateToHalMetadata
8589 *
8590 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8591 *
8592 *
8593 * PARAMETERS :
8594 *   @request  : request sent from framework
8595 *
8596 *
8597 * RETURN     : success: NO_ERROR
8598 *              failure:
8599 *==========================================================================*/
8600int QCamera3HardwareInterface::translateToHalMetadata
8601                                  (const camera3_capture_request_t *request,
8602                                   metadata_buffer_t *hal_metadata,
8603                                   uint32_t snapshotStreamId)
8604{
8605    int rc = 0;
8606    CameraMetadata frame_settings;
8607    frame_settings = request->settings;
8608
8609    /* Do not change the order of the following list unless you know what you are
8610     * doing.
8611     * The order is laid out in such a way that parameters in the front of the table
8612     * may be used to override the parameters later in the table. Examples are:
8613     * 1. META_MODE should precede AEC/AWB/AF MODE
8614     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
8615     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
8616     * 4. Any mode should precede it's corresponding settings
8617     */
8618    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8619        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8620        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8621            rc = BAD_VALUE;
8622        }
8623        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8624        if (rc != NO_ERROR) {
8625            LOGE("extractSceneMode failed");
8626        }
8627    }
8628
8629    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8630        uint8_t fwk_aeMode =
8631            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8632        uint8_t aeMode;
8633        int32_t redeye;
8634
8635        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8636            aeMode = CAM_AE_MODE_OFF;
8637        } else {
8638            aeMode = CAM_AE_MODE_ON;
8639        }
8640        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8641            redeye = 1;
8642        } else {
8643            redeye = 0;
8644        }
8645
8646        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8647                fwk_aeMode);
8648        if (NAME_NOT_FOUND != val) {
8649            int32_t flashMode = (int32_t)val;
8650            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8651        }
8652
8653        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8654        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8655            rc = BAD_VALUE;
8656        }
8657    }
8658
8659    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8660        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8661        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8662                fwk_whiteLevel);
8663        if (NAME_NOT_FOUND != val) {
8664            uint8_t whiteLevel = (uint8_t)val;
8665            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8666                rc = BAD_VALUE;
8667            }
8668        }
8669    }
8670
8671    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8672        uint8_t fwk_cacMode =
8673                frame_settings.find(
8674                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8675        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8676                fwk_cacMode);
8677        if (NAME_NOT_FOUND != val) {
8678            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8679            bool entryAvailable = FALSE;
8680            // Check whether Frameworks set CAC mode is supported in device or not
8681            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8682                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
8683                    entryAvailable = TRUE;
8684                    break;
8685                }
8686            }
8687            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
8688            // If entry not found then set the device supported mode instead of frameworks mode i.e,
8689            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
8690            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
8691            if (entryAvailable == FALSE) {
8692                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8693                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8694                } else {
8695                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8696                        // High is not supported and so set the FAST as spec say's underlying
8697                        // device implementation can be the same for both modes.
8698                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
8699                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8700                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
8701                        // in order to avoid the fps drop due to high quality
8702                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8703                    } else {
8704                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8705                    }
8706                }
8707            }
8708            LOGD("Final cacMode is %d", cacMode);
8709            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8710                rc = BAD_VALUE;
8711            }
8712        } else {
8713            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
8714        }
8715    }
8716
8717    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8718        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8719        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8720                fwk_focusMode);
8721        if (NAME_NOT_FOUND != val) {
8722            uint8_t focusMode = (uint8_t)val;
8723            LOGD("set focus mode %d", focusMode);
8724            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8725                rc = BAD_VALUE;
8726            }
8727        }
8728    }
8729
8730    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8731        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8732        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8733                focalDistance)) {
8734            rc = BAD_VALUE;
8735        }
8736    }
8737
8738    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8739        uint8_t fwk_antibandingMode =
8740                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8741        int val = lookupHalName(ANTIBANDING_MODES_MAP,
8742                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8743        if (NAME_NOT_FOUND != val) {
8744            uint32_t hal_antibandingMode = (uint32_t)val;
8745            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8746                    hal_antibandingMode)) {
8747                rc = BAD_VALUE;
8748            }
8749        }
8750    }
8751
8752    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8753        int32_t expCompensation = frame_settings.find(
8754                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8755        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8756            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8757        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8758            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8759        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8760                expCompensation)) {
8761            rc = BAD_VALUE;
8762        }
8763    }
8764
8765    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8766        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8767        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8768            rc = BAD_VALUE;
8769        }
8770    }
8771    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8772        rc = setHalFpsRange(frame_settings, hal_metadata);
8773        if (rc != NO_ERROR) {
8774            LOGE("setHalFpsRange failed");
8775        }
8776    }
8777
8778    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8779        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8780        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8781            rc = BAD_VALUE;
8782        }
8783    }
8784
8785    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8786        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8787        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8788                fwk_effectMode);
8789        if (NAME_NOT_FOUND != val) {
8790            uint8_t effectMode = (uint8_t)val;
8791            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8792                rc = BAD_VALUE;
8793            }
8794        }
8795    }
8796
8797    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8798        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8799        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8800                colorCorrectMode)) {
8801            rc = BAD_VALUE;
8802        }
8803    }
8804
8805    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8806        cam_color_correct_gains_t colorCorrectGains;
8807        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
8808            colorCorrectGains.gains[i] =
8809                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8810        }
8811        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8812                colorCorrectGains)) {
8813            rc = BAD_VALUE;
8814        }
8815    }
8816
8817    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8818        cam_color_correct_matrix_t colorCorrectTransform;
8819        cam_rational_type_t transform_elem;
8820        size_t num = 0;
8821        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8822           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8823              transform_elem.numerator =
8824                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8825              transform_elem.denominator =
8826                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8827              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8828              num++;
8829           }
8830        }
8831        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8832                colorCorrectTransform)) {
8833            rc = BAD_VALUE;
8834        }
8835    }
8836
8837    cam_trigger_t aecTrigger;
8838    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8839    aecTrigger.trigger_id = -1;
8840    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8841        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8842        aecTrigger.trigger =
8843            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8844        aecTrigger.trigger_id =
8845            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8846        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8847                aecTrigger)) {
8848            rc = BAD_VALUE;
8849        }
8850        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
8851                aecTrigger.trigger, aecTrigger.trigger_id);
8852    }
8853
8854    /*af_trigger must come with a trigger id*/
8855    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8856        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8857        cam_trigger_t af_trigger;
8858        af_trigger.trigger =
8859            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8860        af_trigger.trigger_id =
8861            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8862        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8863            rc = BAD_VALUE;
8864        }
8865        LOGD("AfTrigger: %d AfTriggerID: %d",
8866                af_trigger.trigger, af_trigger.trigger_id);
8867    }
8868
8869    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8870        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8871        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8872            rc = BAD_VALUE;
8873        }
8874    }
8875    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8876        cam_edge_application_t edge_application;
8877        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8878        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8879            edge_application.sharpness = 0;
8880        } else {
8881            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8882        }
8883        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8884            rc = BAD_VALUE;
8885        }
8886    }
8887
8888    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8889        int32_t respectFlashMode = 1;
8890        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8891            uint8_t fwk_aeMode =
8892                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8893            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8894                respectFlashMode = 0;
8895                LOGH("AE Mode controls flash, ignore android.flash.mode");
8896            }
8897        }
8898        if (respectFlashMode) {
8899            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8900                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8901            LOGH("flash mode after mapping %d", val);
8902            // To check: CAM_INTF_META_FLASH_MODE usage
8903            if (NAME_NOT_FOUND != val) {
8904                uint8_t flashMode = (uint8_t)val;
8905                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8906                    rc = BAD_VALUE;
8907                }
8908            }
8909        }
8910    }
8911
8912    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8913        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8914        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8915            rc = BAD_VALUE;
8916        }
8917    }
8918
8919    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8920        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8921        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8922                flashFiringTime)) {
8923            rc = BAD_VALUE;
8924        }
8925    }
8926
8927    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8928        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8929        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8930                hotPixelMode)) {
8931            rc = BAD_VALUE;
8932        }
8933    }
8934
8935    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8936        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8937        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8938                lensAperture)) {
8939            rc = BAD_VALUE;
8940        }
8941    }
8942
8943    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8944        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8945        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8946                filterDensity)) {
8947            rc = BAD_VALUE;
8948        }
8949    }
8950
8951    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8952        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8953        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8954                focalLength)) {
8955            rc = BAD_VALUE;
8956        }
8957    }
8958
8959    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8960        uint8_t optStabMode =
8961                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8962        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8963                optStabMode)) {
8964            rc = BAD_VALUE;
8965        }
8966    }
8967
8968    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8969        uint8_t videoStabMode =
8970                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8971        LOGD("videoStabMode from APP = %d", videoStabMode);
8972        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8973                videoStabMode)) {
8974            rc = BAD_VALUE;
8975        }
8976    }
8977
8978
8979    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8980        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8981        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8982                noiseRedMode)) {
8983            rc = BAD_VALUE;
8984        }
8985    }
8986
8987    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8988        float reprocessEffectiveExposureFactor =
8989            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8990        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8991                reprocessEffectiveExposureFactor)) {
8992            rc = BAD_VALUE;
8993        }
8994    }
8995
8996    cam_crop_region_t scalerCropRegion;
8997    bool scalerCropSet = false;
8998    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8999        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9000        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9001        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9002        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9003
9004        // Map coordinate system from active array to sensor output.
9005        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9006                scalerCropRegion.width, scalerCropRegion.height);
9007
9008        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9009                scalerCropRegion)) {
9010            rc = BAD_VALUE;
9011        }
9012        scalerCropSet = true;
9013    }
9014
9015    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9016        int64_t sensorExpTime =
9017                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9018        LOGD("setting sensorExpTime %lld", sensorExpTime);
9019        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9020                sensorExpTime)) {
9021            rc = BAD_VALUE;
9022        }
9023    }
9024
9025    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9026        int64_t sensorFrameDuration =
9027                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9028        int64_t minFrameDuration = getMinFrameDuration(request);
9029        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9030        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9031            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9032        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9033        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9034                sensorFrameDuration)) {
9035            rc = BAD_VALUE;
9036        }
9037    }
9038
9039    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9040        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9041        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9042                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9043        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9044                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9045        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9046        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9047                sensorSensitivity)) {
9048            rc = BAD_VALUE;
9049        }
9050    }
9051
9052    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9053        int32_t ispSensitivity =
9054            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9055        if (ispSensitivity <
9056            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9057                ispSensitivity =
9058                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9059                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9060        }
9061        if (ispSensitivity >
9062            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9063                ispSensitivity =
9064                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9065                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9066        }
9067        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9068                ispSensitivity)) {
9069            rc = BAD_VALUE;
9070        }
9071    }
9072
9073    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9074        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9075        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9076            rc = BAD_VALUE;
9077        }
9078    }
9079
9080    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9081        uint8_t fwk_facedetectMode =
9082                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9083
9084        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9085                fwk_facedetectMode);
9086
9087        if (NAME_NOT_FOUND != val) {
9088            uint8_t facedetectMode = (uint8_t)val;
9089            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9090                    facedetectMode)) {
9091                rc = BAD_VALUE;
9092            }
9093        }
9094    }
9095
9096    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9097        uint8_t histogramMode =
9098                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9099        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9100                histogramMode)) {
9101            rc = BAD_VALUE;
9102        }
9103    }
9104
9105    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9106        uint8_t sharpnessMapMode =
9107                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9108        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9109                sharpnessMapMode)) {
9110            rc = BAD_VALUE;
9111        }
9112    }
9113
9114    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9115        uint8_t tonemapMode =
9116                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9117        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9118            rc = BAD_VALUE;
9119        }
9120    }
9121    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9122    /*All tonemap channels will have the same number of points*/
9123    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9124        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9125        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9126        cam_rgb_tonemap_curves tonemapCurves;
9127        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9128        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9129            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9130                     tonemapCurves.tonemap_points_cnt,
9131                    CAM_MAX_TONEMAP_CURVE_SIZE);
9132            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9133        }
9134
9135        /* ch0 = G*/
9136        size_t point = 0;
9137        cam_tonemap_curve_t tonemapCurveGreen;
9138        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9139            for (size_t j = 0; j < 2; j++) {
9140               tonemapCurveGreen.tonemap_points[i][j] =
9141                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9142               point++;
9143            }
9144        }
9145        tonemapCurves.curves[0] = tonemapCurveGreen;
9146
9147        /* ch 1 = B */
9148        point = 0;
9149        cam_tonemap_curve_t tonemapCurveBlue;
9150        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9151            for (size_t j = 0; j < 2; j++) {
9152               tonemapCurveBlue.tonemap_points[i][j] =
9153                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9154               point++;
9155            }
9156        }
9157        tonemapCurves.curves[1] = tonemapCurveBlue;
9158
9159        /* ch 2 = R */
9160        point = 0;
9161        cam_tonemap_curve_t tonemapCurveRed;
9162        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9163            for (size_t j = 0; j < 2; j++) {
9164               tonemapCurveRed.tonemap_points[i][j] =
9165                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9166               point++;
9167            }
9168        }
9169        tonemapCurves.curves[2] = tonemapCurveRed;
9170
9171        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9172                tonemapCurves)) {
9173            rc = BAD_VALUE;
9174        }
9175    }
9176
9177    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9178        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9179        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9180                captureIntent)) {
9181            rc = BAD_VALUE;
9182        }
9183    }
9184
9185    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9186        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9187        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9188                blackLevelLock)) {
9189            rc = BAD_VALUE;
9190        }
9191    }
9192
9193    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9194        uint8_t lensShadingMapMode =
9195                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9196        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9197                lensShadingMapMode)) {
9198            rc = BAD_VALUE;
9199        }
9200    }
9201
9202    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9203        cam_area_t roi;
9204        bool reset = true;
9205        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9206
9207        // Map coordinate system from active array to sensor output.
9208        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9209                roi.rect.height);
9210
9211        if (scalerCropSet) {
9212            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9213        }
9214        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9215            rc = BAD_VALUE;
9216        }
9217    }
9218
9219    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9220        cam_area_t roi;
9221        bool reset = true;
9222        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9223
9224        // Map coordinate system from active array to sensor output.
9225        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9226                roi.rect.height);
9227
9228        if (scalerCropSet) {
9229            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9230        }
9231        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9232            rc = BAD_VALUE;
9233        }
9234    }
9235
9236    // CDS for non-HFR non-video mode
9237    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9238            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9239        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9240        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9241            LOGE("Invalid CDS mode %d!", *fwk_cds);
9242        } else {
9243            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9244                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9245                rc = BAD_VALUE;
9246            }
9247        }
9248    }
9249
9250    // TNR
9251    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9252        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9253        uint8_t b_TnrRequested = 0;
9254        cam_denoise_param_t tnr;
9255        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9256        tnr.process_plates =
9257            (cam_denoise_process_type_t)frame_settings.find(
9258            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9259        b_TnrRequested = tnr.denoise_enable;
9260        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9261            rc = BAD_VALUE;
9262        }
9263    }
9264
9265    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9266        int32_t fwk_testPatternMode =
9267                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9268        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9269                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9270
9271        if (NAME_NOT_FOUND != testPatternMode) {
9272            cam_test_pattern_data_t testPatternData;
9273            memset(&testPatternData, 0, sizeof(testPatternData));
9274            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9275            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9276                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9277                int32_t *fwk_testPatternData =
9278                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9279                testPatternData.r = fwk_testPatternData[0];
9280                testPatternData.b = fwk_testPatternData[3];
9281                switch (gCamCapability[mCameraId]->color_arrangement) {
9282                    case CAM_FILTER_ARRANGEMENT_RGGB:
9283                    case CAM_FILTER_ARRANGEMENT_GRBG:
9284                        testPatternData.gr = fwk_testPatternData[1];
9285                        testPatternData.gb = fwk_testPatternData[2];
9286                        break;
9287                    case CAM_FILTER_ARRANGEMENT_GBRG:
9288                    case CAM_FILTER_ARRANGEMENT_BGGR:
9289                        testPatternData.gr = fwk_testPatternData[2];
9290                        testPatternData.gb = fwk_testPatternData[1];
9291                        break;
9292                    default:
9293                        LOGE("color arrangement %d is not supported",
9294                                gCamCapability[mCameraId]->color_arrangement);
9295                        break;
9296                }
9297            }
9298            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9299                    testPatternData)) {
9300                rc = BAD_VALUE;
9301            }
9302        } else {
9303            LOGE("Invalid framework sensor test pattern mode %d",
9304                    fwk_testPatternMode);
9305        }
9306    }
9307
9308    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9309        size_t count = 0;
9310        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9311        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9312                gps_coords.data.d, gps_coords.count, count);
9313        if (gps_coords.count != count) {
9314            rc = BAD_VALUE;
9315        }
9316    }
9317
9318    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9319        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9320        size_t count = 0;
9321        const char *gps_methods_src = (const char *)
9322                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9323        memset(gps_methods, '\0', sizeof(gps_methods));
9324        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9325        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9326                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9327        if (GPS_PROCESSING_METHOD_SIZE != count) {
9328            rc = BAD_VALUE;
9329        }
9330    }
9331
9332    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9333        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9334        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9335                gps_timestamp)) {
9336            rc = BAD_VALUE;
9337        }
9338    }
9339
9340    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9341        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9342        cam_rotation_info_t rotation_info;
9343        if (orientation == 0) {
9344           rotation_info.rotation = ROTATE_0;
9345        } else if (orientation == 90) {
9346           rotation_info.rotation = ROTATE_90;
9347        } else if (orientation == 180) {
9348           rotation_info.rotation = ROTATE_180;
9349        } else if (orientation == 270) {
9350           rotation_info.rotation = ROTATE_270;
9351        }
9352        rotation_info.streamId = snapshotStreamId;
9353        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9354        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9355            rc = BAD_VALUE;
9356        }
9357    }
9358
9359    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9360        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9361        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9362            rc = BAD_VALUE;
9363        }
9364    }
9365
9366    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9367        uint32_t thumb_quality = (uint32_t)
9368                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9369        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9370                thumb_quality)) {
9371            rc = BAD_VALUE;
9372        }
9373    }
9374
9375    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9376        cam_dimension_t dim;
9377        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9378        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9379        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9380            rc = BAD_VALUE;
9381        }
9382    }
9383
9384    // Internal metadata
9385    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9386        size_t count = 0;
9387        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9388        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9389                privatedata.data.i32, privatedata.count, count);
9390        if (privatedata.count != count) {
9391            rc = BAD_VALUE;
9392        }
9393    }
9394
9395    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
9396        uint8_t* use_av_timer =
9397                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
9398        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
9399            rc = BAD_VALUE;
9400        }
9401    }
9402
9403    // EV step
9404    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9405            gCamCapability[mCameraId]->exp_compensation_step)) {
9406        rc = BAD_VALUE;
9407    }
9408
9409    // CDS info
9410    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9411        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9412                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9413
9414        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9415                CAM_INTF_META_CDS_DATA, *cdsData)) {
9416            rc = BAD_VALUE;
9417        }
9418    }
9419
9420    // Hybrid AE
9421    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9422        uint8_t *hybrid_ae = (uint8_t *)
9423                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9424
9425        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9426                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9427            rc = BAD_VALUE;
9428        }
9429    }
9430
9431    return rc;
9432}
9433
9434/*===========================================================================
9435 * FUNCTION   : captureResultCb
9436 *
9437 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9438 *
9439 * PARAMETERS :
9440 *   @frame  : frame information from mm-camera-interface
9441 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9442 *   @userdata: userdata
9443 *
9444 * RETURN     : NONE
9445 *==========================================================================*/
9446void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9447                camera3_stream_buffer_t *buffer,
9448                uint32_t frame_number, bool isInputBuffer, void *userdata)
9449{
9450    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9451    if (hw == NULL) {
9452        LOGE("Invalid hw %p", hw);
9453        return;
9454    }
9455
9456    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9457    return;
9458}
9459
9460
9461/*===========================================================================
9462 * FUNCTION   : initialize
9463 *
9464 * DESCRIPTION: Pass framework callback pointers to HAL
9465 *
9466 * PARAMETERS :
9467 *
9468 *
9469 * RETURN     : Success : 0
9470 *              Failure: -ENODEV
9471 *==========================================================================*/
9472
9473int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9474                                  const camera3_callback_ops_t *callback_ops)
9475{
9476    LOGD("E");
9477    QCamera3HardwareInterface *hw =
9478        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9479    if (!hw) {
9480        LOGE("NULL camera device");
9481        return -ENODEV;
9482    }
9483
9484    int rc = hw->initialize(callback_ops);
9485    LOGD("X");
9486    return rc;
9487}
9488
9489/*===========================================================================
9490 * FUNCTION   : configure_streams
9491 *
9492 * DESCRIPTION:
9493 *
9494 * PARAMETERS :
9495 *
9496 *
9497 * RETURN     : Success: 0
9498 *              Failure: -EINVAL (if stream configuration is invalid)
9499 *                       -ENODEV (fatal error)
9500 *==========================================================================*/
9501
9502int QCamera3HardwareInterface::configure_streams(
9503        const struct camera3_device *device,
9504        camera3_stream_configuration_t *stream_list)
9505{
9506    LOGD("E");
9507    QCamera3HardwareInterface *hw =
9508        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9509    if (!hw) {
9510        LOGE("NULL camera device");
9511        return -ENODEV;
9512    }
9513    int rc = hw->configureStreams(stream_list);
9514    LOGD("X");
9515    return rc;
9516}
9517
9518/*===========================================================================
9519 * FUNCTION   : construct_default_request_settings
9520 *
9521 * DESCRIPTION: Configure a settings buffer to meet the required use case
9522 *
9523 * PARAMETERS :
9524 *
9525 *
9526 * RETURN     : Success: Return valid metadata
9527 *              Failure: Return NULL
9528 *==========================================================================*/
9529const camera_metadata_t* QCamera3HardwareInterface::
9530    construct_default_request_settings(const struct camera3_device *device,
9531                                        int type)
9532{
9533
9534    LOGD("E");
9535    camera_metadata_t* fwk_metadata = NULL;
9536    QCamera3HardwareInterface *hw =
9537        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9538    if (!hw) {
9539        LOGE("NULL camera device");
9540        return NULL;
9541    }
9542
9543    fwk_metadata = hw->translateCapabilityToMetadata(type);
9544
9545    LOGD("X");
9546    return fwk_metadata;
9547}
9548
9549/*===========================================================================
9550 * FUNCTION   : process_capture_request
9551 *
9552 * DESCRIPTION:
9553 *
9554 * PARAMETERS :
9555 *
9556 *
9557 * RETURN     :
9558 *==========================================================================*/
9559int QCamera3HardwareInterface::process_capture_request(
9560                    const struct camera3_device *device,
9561                    camera3_capture_request_t *request)
9562{
9563    LOGD("E");
9564    QCamera3HardwareInterface *hw =
9565        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9566    if (!hw) {
9567        LOGE("NULL camera device");
9568        return -EINVAL;
9569    }
9570
9571    int rc = hw->processCaptureRequest(request);
9572    LOGD("X");
9573    return rc;
9574}
9575
9576/*===========================================================================
9577 * FUNCTION   : dump
9578 *
9579 * DESCRIPTION:
9580 *
9581 * PARAMETERS :
9582 *
9583 *
9584 * RETURN     :
9585 *==========================================================================*/
9586
9587void QCamera3HardwareInterface::dump(
9588                const struct camera3_device *device, int fd)
9589{
9590    /* Log level property is read when "adb shell dumpsys media.camera" is
9591       called so that the log level can be controlled without restarting
9592       the media server */
9593    getLogLevel();
9594
9595    LOGD("E");
9596    QCamera3HardwareInterface *hw =
9597        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9598    if (!hw) {
9599        LOGE("NULL camera device");
9600        return;
9601    }
9602
9603    hw->dump(fd);
9604    LOGD("X");
9605    return;
9606}
9607
9608/*===========================================================================
9609 * FUNCTION   : flush
9610 *
9611 * DESCRIPTION:
9612 *
9613 * PARAMETERS :
9614 *
9615 *
9616 * RETURN     :
9617 *==========================================================================*/
9618
9619int QCamera3HardwareInterface::flush(
9620                const struct camera3_device *device)
9621{
9622    int rc;
9623    LOGD("E");
9624    QCamera3HardwareInterface *hw =
9625        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9626    if (!hw) {
9627        LOGE("NULL camera device");
9628        return -EINVAL;
9629    }
9630
9631    pthread_mutex_lock(&hw->mMutex);
9632    // Validate current state
9633    switch (hw->mState) {
9634        case STARTED:
9635            /* valid state */
9636            break;
9637
9638        case ERROR:
9639            pthread_mutex_unlock(&hw->mMutex);
9640            hw->handleCameraDeviceError();
9641            return -ENODEV;
9642
9643        default:
9644            LOGI("Flush returned during state %d", hw->mState);
9645            pthread_mutex_unlock(&hw->mMutex);
9646            return 0;
9647    }
9648    pthread_mutex_unlock(&hw->mMutex);
9649
9650    rc = hw->flush(true /* restart channels */ );
9651    LOGD("X");
9652    return rc;
9653}
9654
9655/*===========================================================================
9656 * FUNCTION   : close_camera_device
9657 *
9658 * DESCRIPTION:
9659 *
9660 * PARAMETERS :
9661 *
9662 *
9663 * RETURN     :
9664 *==========================================================================*/
9665int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9666{
9667    int ret = NO_ERROR;
9668    QCamera3HardwareInterface *hw =
9669        reinterpret_cast<QCamera3HardwareInterface *>(
9670            reinterpret_cast<camera3_device_t *>(device)->priv);
9671    if (!hw) {
9672        LOGE("NULL camera device");
9673        return BAD_VALUE;
9674    }
9675
9676    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
9677    delete hw;
9678    LOGI("[KPI Perf]: X");
9679    return ret;
9680}
9681
9682/*===========================================================================
9683 * FUNCTION   : getWaveletDenoiseProcessPlate
9684 *
9685 * DESCRIPTION: query wavelet denoise process plate
9686 *
9687 * PARAMETERS : None
9688 *
 * RETURN     : WNR process plate value
9690 *==========================================================================*/
9691cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9692{
9693    char prop[PROPERTY_VALUE_MAX];
9694    memset(prop, 0, sizeof(prop));
9695    property_get("persist.denoise.process.plates", prop, "0");
9696    int processPlate = atoi(prop);
9697    switch(processPlate) {
9698    case 0:
9699        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9700    case 1:
9701        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9702    case 2:
9703        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9704    case 3:
9705        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9706    default:
9707        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9708    }
9709}
9710
9711
9712/*===========================================================================
9713 * FUNCTION   : getTemporalDenoiseProcessPlate
9714 *
9715 * DESCRIPTION: query temporal denoise process plate
9716 *
9717 * PARAMETERS : None
9718 *
 * RETURN     : TNR process plate value
9720 *==========================================================================*/
9721cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9722{
9723    char prop[PROPERTY_VALUE_MAX];
9724    memset(prop, 0, sizeof(prop));
9725    property_get("persist.tnr.process.plates", prop, "0");
9726    int processPlate = atoi(prop);
9727    switch(processPlate) {
9728    case 0:
9729        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9730    case 1:
9731        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9732    case 2:
9733        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9734    case 3:
9735        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9736    default:
9737        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9738    }
9739}
9740
9741
9742/*===========================================================================
9743 * FUNCTION   : extractSceneMode
9744 *
9745 * DESCRIPTION: Extract scene mode from frameworks set metadata
9746 *
9747 * PARAMETERS :
9748 *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
9750 *      @hal_metadata: hal metadata structure
9751 *
9752 * RETURN     : None
9753 *==========================================================================*/
9754int32_t QCamera3HardwareInterface::extractSceneMode(
9755        const CameraMetadata &frame_settings, uint8_t metaMode,
9756        metadata_buffer_t *hal_metadata)
9757{
9758    int32_t rc = NO_ERROR;
9759
9760    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9761        camera_metadata_ro_entry entry =
9762                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9763        if (0 == entry.count)
9764            return rc;
9765
9766        uint8_t fwk_sceneMode = entry.data.u8[0];
9767
9768        int val = lookupHalName(SCENE_MODES_MAP,
9769                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9770                fwk_sceneMode);
9771        if (NAME_NOT_FOUND != val) {
9772            uint8_t sceneMode = (uint8_t)val;
9773            LOGD("sceneMode: %d", sceneMode);
9774            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9775                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9776                rc = BAD_VALUE;
9777            }
9778        }
9779    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9780            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9781        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9782        LOGD("sceneMode: %d", sceneMode);
9783        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9784                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9785            rc = BAD_VALUE;
9786        }
9787    }
9788    return rc;
9789}
9790
9791/*===========================================================================
9792 * FUNCTION   : needRotationReprocess
9793 *
9794 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9795 *
9796 * PARAMETERS : none
9797 *
9798 * RETURN     : true: needed
9799 *              false: no need
9800 *==========================================================================*/
9801bool QCamera3HardwareInterface::needRotationReprocess()
9802{
9803    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9804        // current rotation is not zero, and pp has the capability to process rotation
9805        LOGH("need do reprocess for rotation");
9806        return true;
9807    }
9808
9809    return false;
9810}
9811
9812/*===========================================================================
9813 * FUNCTION   : needReprocess
9814 *
9815 * DESCRIPTION: if reprocess in needed
9816 *
9817 * PARAMETERS : none
9818 *
9819 * RETURN     : true: needed
9820 *              false: no need
9821 *==========================================================================*/
9822bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
9823{
9824    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
9825        // TODO: add for ZSL HDR later
9826        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9827        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9828            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
9829            return true;
9830        } else {
9831            LOGH("already post processed frame");
9832            return false;
9833        }
9834    }
9835    return needRotationReprocess();
9836}
9837
9838/*===========================================================================
9839 * FUNCTION   : needJpegExifRotation
9840 *
9841 * DESCRIPTION: if rotation from jpeg is needed
9842 *
9843 * PARAMETERS : none
9844 *
9845 * RETURN     : true: needed
9846 *              false: no need
9847 *==========================================================================*/
9848bool QCamera3HardwareInterface::needJpegExifRotation()
9849{
9850   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9851    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9852       LOGD("Need use Jpeg EXIF Rotation");
9853       return true;
9854    }
9855    return false;
9856}
9857
9858/*===========================================================================
9859 * FUNCTION   : addOfflineReprocChannel
9860 *
9861 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9862 *              coming from input channel
9863 *
9864 * PARAMETERS :
9865 *   @config  : reprocess configuration
9866 *   @inputChHandle : pointer to the input (source) channel
9867 *
9868 *
9869 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
9870 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel on the same camera/channel handles;
    // captureResultCb routes its output back through this HAL instance.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        // Failed channels are destroyed here so the caller only ever sees a
        // fully initialized channel or NULL.
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Start from the HAL3 pp superset, then adjust per hardware capability.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop the rotation bit when the pp hardware cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
9918
9919/*===========================================================================
9920 * FUNCTION   : getMobicatMask
9921 *
9922 * DESCRIPTION: returns mobicat mask
9923 *
9924 * PARAMETERS : none
9925 *
9926 * RETURN     : mobicat mask
9927 *
9928 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Value cached by setMobicat(); non-zero when mobicat is enabled.
    return m_MobicatMask;
}
9933
9934/*===========================================================================
9935 * FUNCTION   : setMobicat
9936 *
9937 * DESCRIPTION: set Mobicat on/off.
9938 *
9939 * PARAMETERS :
9940 *   @params  : none
9941 *
9942 * RETURN     : int32_t type of status
9943 *              NO_ERROR  -- success
9944 *              none-zero failure code
9945 *==========================================================================*/
9946int32_t QCamera3HardwareInterface::setMobicat()
9947{
9948    char value [PROPERTY_VALUE_MAX];
9949    property_get("persist.camera.mobicat", value, "0");
9950    int32_t ret = NO_ERROR;
9951    uint8_t enableMobi = (uint8_t)atoi(value);
9952
9953    if (enableMobi) {
9954        tune_cmd_t tune_cmd;
9955        tune_cmd.type = SET_RELOAD_CHROMATIX;
9956        tune_cmd.module = MODULE_ALL;
9957        tune_cmd.value = TRUE;
9958        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9959                CAM_INTF_PARM_SET_VFE_COMMAND,
9960                tune_cmd);
9961
9962        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9963                CAM_INTF_PARM_SET_PP_COMMAND,
9964                tune_cmd);
9965    }
9966    m_MobicatMask = enableMobi;
9967
9968    return ret;
9969}
9970
9971/*===========================================================================
9972* FUNCTION   : getLogLevel
9973*
9974* DESCRIPTION: Reads the log level property into a variable
9975*
9976* PARAMETERS :
9977*   None
9978*
9979* RETURN     :
9980*   None
9981*==========================================================================*/
9982void QCamera3HardwareInterface::getLogLevel()
9983{
9984    char prop[PROPERTY_VALUE_MAX];
9985    uint32_t globalLogLevel = 0;
9986
9987    property_get("persist.camera.hal.debug", prop, "0");
9988    int val = atoi(prop);
9989    if (0 <= val) {
9990        gCamHal3LogLevel = (uint32_t)val;
9991    }
9992
9993    property_get("persist.camera.kpi.debug", prop, "1");
9994    gKpiDebugLevel = atoi(prop);
9995
9996    property_get("persist.camera.global.debug", prop, "0");
9997    val = atoi(prop);
9998    if (0 <= val) {
9999        globalLogLevel = (uint32_t)val;
10000    }
10001
10002    /* Highest log level among hal.logs and global.logs is selected */
10003    if (gCamHal3LogLevel < globalLogLevel)
10004        gCamHal3LogLevel = globalLogLevel;
10005
10006    return;
10007}
10008
10009/*===========================================================================
10010 * FUNCTION   : validateStreamRotations
10011 *
10012 * DESCRIPTION: Check if the rotations requested are supported
10013 *
10014 * PARAMETERS :
10015 *   @stream_list : streams to be configured
10016 *
10017 * RETURN     : NO_ERROR on success
10018 *              -EINVAL on failure
10019 *
10020 *==========================================================================*/
10021int QCamera3HardwareInterface::validateStreamRotations(
10022        camera3_stream_configuration_t *streamList)
10023{
10024    int rc = NO_ERROR;
10025
10026    /*
10027    * Loop through all streams requested in configuration
10028    * Check if unsupported rotations have been requested on any of them
10029    */
10030    for (size_t j = 0; j < streamList->num_streams; j++){
10031        camera3_stream_t *newStream = streamList->streams[j];
10032
10033        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10034        bool isImplDef = (newStream->format ==
10035                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10036        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10037                isImplDef);
10038
10039        if (isRotated && (!isImplDef || isZsl)) {
10040            LOGE("Error: Unsupported rotation of %d requested for stream"
10041                    "type:%d and stream format:%d",
10042                    newStream->rotation, newStream->stream_type,
10043                    newStream->format);
10044            rc = -EINVAL;
10045            break;
10046        }
10047    }
10048
10049    return rc;
10050}
10051
10052/*===========================================================================
10053* FUNCTION   : getFlashInfo
10054*
10055* DESCRIPTION: Retrieve information about whether the device has a flash.
10056*
10057* PARAMETERS :
10058*   @cameraId  : Camera id to query
10059*   @hasFlash  : Boolean indicating whether there is a flash device
10060*                associated with given camera
10061*   @flashNode : If a flash device exists, this will be its device node.
10062*
10063* RETURN     :
10064*   None
10065*==========================================================================*/
10066void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10067        bool& hasFlash,
10068        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10069{
10070    cam_capability_t* camCapability = gCamCapability[cameraId];
10071    if (NULL == camCapability) {
10072        hasFlash = false;
10073        flashNode[0] = '\0';
10074    } else {
10075        hasFlash = camCapability->flash_available;
10076        strlcpy(flashNode,
10077                (char*)camCapability->flash_dev_name,
10078                QCAMERA_MAX_FILEPATH_LENGTH);
10079    }
10080}
10081
10082/*===========================================================================
10083* FUNCTION   : getEepromVersionInfo
10084*
10085* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10086*
10087* PARAMETERS : None
10088*
10089* RETURN     : string describing EEPROM version
10090*              "\0" if no such info available
10091*==========================================================================*/
10092const char *QCamera3HardwareInterface::getEepromVersionInfo()
10093{
10094    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
10095}
10096
10097/*===========================================================================
10098* FUNCTION   : getLdafCalib
10099*
10100* DESCRIPTION: Retrieve Laser AF calibration data
10101*
10102* PARAMETERS : None
10103*
10104* RETURN     : Two uint32_t describing laser AF calibration data
10105*              NULL if none is available.
10106*==========================================================================*/
10107const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10108{
10109    if (mLdafCalibExist) {
10110        return &mLdafCalib[0];
10111    } else {
10112        return NULL;
10113    }
10114}
10115
10116/*===========================================================================
10117 * FUNCTION   : dynamicUpdateMetaStreamInfo
10118 *
10119 * DESCRIPTION: This function:
10120 *             (1) stops all the channels
10121 *             (2) returns error on pending requests and buffers
10122 *             (3) sends metastream_info in setparams
10123 *             (4) starts all channels
10124 *             This is useful when sensor has to be restarted to apply any
10125 *             settings such as frame rate from a different sensor mode
10126 *
10127 * PARAMETERS : None
10128 *
10129 * RETURN     : NO_ERROR on success
10130 *              Error codes on failure
10131 *
10132 *==========================================================================*/
10133int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
10134{
10135    ATRACE_CALL();
10136    int rc = NO_ERROR;
10137
10138    LOGD("E");
10139
10140    rc = stopAllChannels();
10141    if (rc < 0) {
10142        LOGE("stopAllChannels failed");
10143        return rc;
10144    }
10145
10146    rc = notifyErrorForPendingRequests();
10147    if (rc < 0) {
10148        LOGE("notifyErrorForPendingRequests failed");
10149        return rc;
10150    }
10151
10152    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
10153        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
10154                "Format:%d",
10155                mStreamConfigInfo.type[i],
10156                mStreamConfigInfo.stream_sizes[i].width,
10157                mStreamConfigInfo.stream_sizes[i].height,
10158                mStreamConfigInfo.postprocess_mask[i],
10159                mStreamConfigInfo.format[i]);
10160    }
10161
10162    /* Send meta stream info once again so that ISP can start */
10163    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10164            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
10165    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
10166            mParameters);
10167    if (rc < 0) {
10168        LOGE("set Metastreaminfo failed. Sensor mode does not change");
10169    }
10170
10171    rc = startAllChannels();
10172    if (rc < 0) {
10173        LOGE("startAllChannels failed");
10174        return rc;
10175    }
10176
10177    LOGD("X");
10178    return rc;
10179}
10180
10181/*===========================================================================
10182 * FUNCTION   : stopAllChannels
10183 *
10184 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10185 *
10186 * PARAMETERS : None
10187 *
10188 * RETURN     : NO_ERROR on success
10189 *              Error codes on failure
10190 *
10191 *==========================================================================*/
10192int32_t QCamera3HardwareInterface::stopAllChannels()
10193{
10194    int32_t rc = NO_ERROR;
10195
10196    LOGD("Stopping all channels");
10197    // Stop the Streams/Channels
10198    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10199        it != mStreamInfo.end(); it++) {
10200        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10201        if (channel) {
10202            channel->stop();
10203        }
10204        (*it)->status = INVALID;
10205    }
10206
10207    if (mSupportChannel) {
10208        mSupportChannel->stop();
10209    }
10210    if (mAnalysisChannel) {
10211        mAnalysisChannel->stop();
10212    }
10213    if (mRawDumpChannel) {
10214        mRawDumpChannel->stop();
10215    }
10216    if (mMetadataChannel) {
10217        /* If content of mStreamInfo is not 0, there is metadata stream */
10218        mMetadataChannel->stop();
10219    }
10220
10221    LOGD("All channels stopped");
10222    return rc;
10223}
10224
10225/*===========================================================================
10226 * FUNCTION   : startAllChannels
10227 *
10228 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10229 *
10230 * PARAMETERS : None
10231 *
10232 * RETURN     : NO_ERROR on success
10233 *              Error codes on failure
10234 *
10235 *==========================================================================*/
10236int32_t QCamera3HardwareInterface::startAllChannels()
10237{
10238    int32_t rc = NO_ERROR;
10239
10240    LOGD("Start all channels ");
10241    // Start the Streams/Channels
10242    if (mMetadataChannel) {
10243        /* If content of mStreamInfo is not 0, there is metadata stream */
10244        rc = mMetadataChannel->start();
10245        if (rc < 0) {
10246            LOGE("META channel start failed");
10247            return rc;
10248        }
10249    }
10250    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10251        it != mStreamInfo.end(); it++) {
10252        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10253        if (channel) {
10254            rc = channel->start();
10255            if (rc < 0) {
10256                LOGE("channel start failed");
10257                return rc;
10258            }
10259        }
10260    }
10261    if (mAnalysisChannel) {
10262        mAnalysisChannel->start();
10263    }
10264    if (mSupportChannel) {
10265        rc = mSupportChannel->start();
10266        if (rc < 0) {
10267            LOGE("Support channel start failed");
10268            return rc;
10269        }
10270    }
10271    if (mRawDumpChannel) {
10272        rc = mRawDumpChannel->start();
10273        if (rc < 0) {
10274            LOGE("RAW dump channel start failed");
10275            return rc;
10276        }
10277    }
10278
10279    LOGD("All channels started");
10280    return rc;
10281}
10282
10283/*===========================================================================
10284 * FUNCTION   : notifyErrorForPendingRequests
10285 *
10286 * DESCRIPTION: This function sends error for all the pending requests/buffers
10287 *
10288 * PARAMETERS : None
10289 *
10290 * RETURN     : Error codes
10291 *              NO_ERROR on success
10292 *
10293 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest pending request. Buffers belonging to frames older
    // than this get ERROR_BUFFER (metadata already delivered); frames at or
    // beyond it get a full ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    // Iterate without the loop-header increment: both branches erase the
    // current element and advance `req` via the erase() return value.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            // Capture result carries only the errored output buffers; no
            // metadata (result.result stays NULL).
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // One ERROR_BUFFER notify per buffer, then the buffer is
                // returned with CAMERA3_BUFFER_STATUS_ERROR below.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            // NOTE(review): `i` is valid when mPendingRequestsList is
            // non-empty; if the list were empty, frameNum is UINT_MAX and
            // this branch is reached only for frame_number >= UINT_MAX —
            // confirm i->input_buffer below cannot dereference end().
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            // ERROR_REQUEST: one notify for the whole request, all buffers
            // returned errored in a single capture result.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            // Also drop the request entry that matched this frame.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive sweep: everything should already be erased above.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
10432
10433bool QCamera3HardwareInterface::isOnEncoder(
10434        const cam_dimension_t max_viewfinder_size,
10435        uint32_t width, uint32_t height)
10436{
10437    return (width > (uint32_t)max_viewfinder_size.width ||
10438            height > (uint32_t)max_viewfinder_size.height);
10439}
10440
10441/*===========================================================================
10442 * FUNCTION   : setBundleInfo
10443 *
10444 * DESCRIPTION: Set bundle info for all streams that are bundle.
10445 *
10446 * PARAMETERS : None
10447 *
10448 * RETURN     : NO_ERROR on success
10449 *              Error codes on failure
10450 *==========================================================================*/
10451int32_t QCamera3HardwareInterface::setBundleInfo()
10452{
10453    int32_t rc = NO_ERROR;
10454
10455    if (mChannelHandle) {
10456        cam_bundle_config_t bundleInfo;
10457        memset(&bundleInfo, 0, sizeof(bundleInfo));
10458        rc = mCameraHandle->ops->get_bundle_info(
10459                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10460        if (rc != NO_ERROR) {
10461            LOGE("get_bundle_info failed");
10462            return rc;
10463        }
10464        if (mAnalysisChannel) {
10465            mAnalysisChannel->setBundleInfo(bundleInfo);
10466        }
10467        if (mSupportChannel) {
10468            mSupportChannel->setBundleInfo(bundleInfo);
10469        }
10470        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10471                it != mStreamInfo.end(); it++) {
10472            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10473            channel->setBundleInfo(bundleInfo);
10474        }
10475        if (mRawDumpChannel) {
10476            mRawDumpChannel->setBundleInfo(bundleInfo);
10477        }
10478    }
10479
10480    return rc;
10481}
10482
10483/*===========================================================================
10484 * FUNCTION   : get_num_overall_buffers
10485 *
10486 * DESCRIPTION: Estimate number of pending buffers across all requests.
10487 *
10488 * PARAMETERS : None
10489 *
10490 * RETURN     : Number of overall pending buffers
10491 *
10492 *==========================================================================*/
10493uint32_t PendingBuffersMap::get_num_overall_buffers()
10494{
10495    uint32_t sum_buffers = 0;
10496    for (auto &req : mPendingBuffersInRequest) {
10497        sum_buffers += req.mPendingBufferList.size();
10498    }
10499    return sum_buffers;
10500}
10501
10502/*===========================================================================
10503 * FUNCTION   : removeBuf
10504 *
10505 * DESCRIPTION: Remove a matching buffer from tracker.
10506 *
10507 * PARAMETERS : @buffer: image buffer for the callback
10508 *
10509 * RETURN     : None
10510 *
10511 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    // Scan every pending request's buffer list for the given handle and
    // remove the first match; a request left with no buffers is dropped
    // from the map entirely.
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                // The erase()s above are safe only because we break out of
                // both loops immediately: neither (possibly invalidated)
                // iterator is incremented again.
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
10538
10539/*===========================================================================
10540 * FUNCTION   : setPAAFSupport
10541 *
10542 * DESCRIPTION: Set the preview-assisted auto focus support bit in
10543 *              feature mask according to stream type and filter
10544 *              arrangement
10545 *
10546 * PARAMETERS : @feature_mask: current feature mask, which may be modified
10547 *              @stream_type: stream type
10548 *              @filter_arrangement: filter arrangement
10549 *
10550 * RETURN     : None
10551 *==========================================================================*/
10552void QCamera3HardwareInterface::setPAAFSupport(
10553        cam_feature_mask_t& feature_mask,
10554        cam_stream_type_t stream_type,
10555        cam_color_filter_arrangement_t filter_arrangement)
10556{
10557    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
10558            feature_mask, stream_type, filter_arrangement);
10559
10560    switch (filter_arrangement) {
10561    case CAM_FILTER_ARRANGEMENT_RGGB:
10562    case CAM_FILTER_ARRANGEMENT_GRBG:
10563    case CAM_FILTER_ARRANGEMENT_GBRG:
10564    case CAM_FILTER_ARRANGEMENT_BGGR:
10565        if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
10566                (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
10567                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
10568            feature_mask |= CAM_QCOM_FEATURE_PAAF;
10569        }
10570        break;
10571    case CAM_FILTER_ARRANGEMENT_Y:
10572        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
10573            feature_mask |= CAM_QCOM_FEATURE_PAAF;
10574        }
10575        break;
10576    default:
10577        break;
10578    }
10579}
10580}; //end namespace qcamera
10581