QCamera3HWI.cpp revision 28263f1f90ddbc15ece167cca540ec21132e188a
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
// Fetch the mapped buffer pointer at INDEX from a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture request.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum pixel values for the supported raw bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD video dimensions; streams at/above this are treated as 4K video.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 values per metering/focus region tuple (x1,y1,x2,y2,weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
#define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds

#define FLUSH_TIMEOUT 3

// Number of entries in a statically-sized map table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features this HAL may request from the backend.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

// Per-camera capability table; filled in outside this file (one slot per sensor).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata per camera, built lazily elsewhere in this HAL.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity (read via getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions; guarded by gCamLock.
extern uint8_t gNumCameraSessions;
113
// CDS (chroma de-spatial-noise) property string -> backend mode mapping.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Android color-effect enum -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android auto-white-balance enum -> backend WB mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene-mode enum -> backend scene mode. Note STEADYPHOTO maps to the
// backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android AF-mode enum -> backend focus mode. AF_MODE_OFF appears twice
// (OFF and FIXED): when translating in the HAL->Android direction, the first
// matching entry wins, so entry order here is deliberate.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
180
// Android chromatic-aberration-correction enum -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding enum -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE-mode enum -> backend flash mode. Both AE_MODE_OFF and AE_MODE_ON
// intentionally map to FLASH_MODE_OFF (plain AE does not fire the flash).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android explicit flash-mode enum -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect-mode enum -> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus-distance-calibration enum -> backend calibration quality.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens-state enum -> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
244
// JPEG thumbnail sizes advertised in static metadata, as flat (width, height)
// pairs; the leading (0, 0) entry means "no thumbnail", as required by the
// ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Default video sizes considered for high-frame-rate recording, largest first.
const cam_dimension_t default_hfr_video_sizes[] = {
    { 3840, 2160 },
    { 1920, 1080 },
    { 1280,  720 },
    {  640,  480 },
    {  480,  320 }
};
260

// Android sensor test-pattern enum -> backend test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested video fps -> backend HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
310
// camera3_device_ops vtable handed to the camera framework (see
// mCameraDevice.ops in the constructor). NULL entries are intentionally
// unimplemented for the HAL device version this HAL reports.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL, // not implemented
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL, // not implemented
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
322
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module callbacks into the camera service (stored, not owned)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Fill in the hw_device_t header the framework dispatches through.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) enable flags for preview and video.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32; // fallback if the GPU lib is absent
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             // Value is read before dlclose, so closing here is safe.
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
432
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown order
 *              matters: stop every stream/channel first, then delete the
 *              channel objects, then send the final "unconfigure" to the
 *              backend, and finally close the camera and free local state.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // All channels are stopped now; deleting them is safe.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                // Best-effort: log and continue tearing down.
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Free per-request bookkeeping (erasePendingRequest releases any cloned
    // input buffers and settings).
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
561
562/*===========================================================================
563 * FUNCTION   : erasePendingRequest
564 *
565 * DESCRIPTION: function to erase a desired pending request after freeing any
566 *              allocated memory
567 *
568 * PARAMETERS :
569 *   @i       : iterator pointing to pending request to be erased
570 *
571 * RETURN     : iterator pointing to the next request
572 *==========================================================================*/
573QCamera3HardwareInterface::pendingRequestIterator
574        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
575{
576    if (i->input_buffer != NULL) {
577        free(i->input_buffer);
578        i->input_buffer = NULL;
579    }
580    if (i->settings != NULL)
581        free_camera_metadata((camera_metadata_t*)i->settings);
582    return mPendingRequestsList.erase(i);
583}
584
585/*===========================================================================
586 * FUNCTION   : camEvtHandle
587 *
588 * DESCRIPTION: Function registered to mm-camera-interface to handle events
589 *
590 * PARAMETERS :
591 *   @camera_handle : interface layer camera handle
592 *   @evt           : ptr to event
593 *   @user_data     : user data ptr
594 *
595 * RETURN     : none
596 *==========================================================================*/
597void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
598                                          mm_camera_event_t *evt,
599                                          void *user_data)
600{
601    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
602    if (obj && evt) {
603        switch(evt->server_event_type) {
604            case CAM_EVENT_TYPE_DAEMON_DIED:
605                pthread_mutex_lock(&obj->mMutex);
606                obj->mState = ERROR;
607                pthread_mutex_unlock(&obj->mMutex);
608                LOGE("Fatal, camera daemon died");
609                break;
610
611            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
612                LOGD("HAL got request pull from Daemon");
613                pthread_mutex_lock(&obj->mMutex);
614                obj->mWokenUpByDaemon = true;
615                obj->unblockRequestIfNecessary();
616                pthread_mutex_unlock(&obj->mMutex);
617                break;
618
619            default:
620                LOGW("Warning: Unhandled event %d",
621                        evt->server_event_type);
622                break;
623        }
624    } else {
625        LOGE("NULL user_data/evt");
626    }
627}
628
629/*===========================================================================
630 * FUNCTION   : openCamera
631 *
632 * DESCRIPTION: open camera
633 *
634 * PARAMETERS :
635 *   @hw_device  : double ptr for camera device struct
636 *
637 * RETURN     : int32_t type of status
638 *              NO_ERROR  -- success
639 *              none-zero failure code
640 *==========================================================================*/
641int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
642{
643    int rc = 0;
644    if (mState != CLOSED) {
645        *hw_device = NULL;
646        return PERMISSION_DENIED;
647    }
648
649    m_perfLock.lock_acq();
650    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
651             mCameraId);
652
653    rc = openCamera();
654    if (rc == 0) {
655        *hw_device = &mCameraDevice.common;
656    } else
657        *hw_device = NULL;
658
659    m_perfLock.lock_rel();
660    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
661             mCameraId, rc);
662
663    if (rc == NO_ERROR) {
664        mState = OPENED;
665    }
666    return rc;
667}
668
/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera. Reserves the flash unit, opens the backend
 *              session, registers the event callback and allocates the
 *              3A debug exif scratch buffer.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CALL();
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // The flash unit must be released from torch use before the sensor opens.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    // Scratch buffer for 3A debug data embedded in JPEG exif; freed in
    // closeCamera().
    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    return NO_ERROR;
}
746
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera. Mirrors openCamera(): closes the backend
 *              session, updates the display-HAL launch status, frees the
 *              exif debug buffer and releases the flash reservation.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CALL();
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);
    // NOTE(review): assumes mCameraHandle is non-NULL; callers appear to gate
    // on mState (see destructor) — confirm no path reaches here after a
    // failed open.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug exif scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
794
795/*===========================================================================
796 * FUNCTION   : initialize
797 *
798 * DESCRIPTION: Initialize frameworks callback functions
799 *
800 * PARAMETERS :
801 *   @callback_ops : callback function to frameworks
802 *
803 * RETURN     :
804 *
805 *==========================================================================*/
806int QCamera3HardwareInterface::initialize(
807        const struct camera3_callback_ops *callback_ops)
808{
809    ATRACE_CALL();
810    int rc;
811
812    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
813    pthread_mutex_lock(&mMutex);
814
815    // Validate current state
816    switch (mState) {
817        case OPENED:
818            /* valid state */
819            break;
820
821        case ERROR:
822            pthread_mutex_unlock(&mMutex);
823            handleCameraDeviceError();
824            rc = -ENODEV;
825            goto err2;
826
827        default:
828            LOGE("Invalid state %d", mState);
829            rc = -ENODEV;
830            goto err1;
831    }
832
833    rc = initParameters();
834    if (rc < 0) {
835        LOGE("initParamters failed %d", rc);
836        goto err1;
837    }
838    mCallbackOps = callback_ops;
839
840    mChannelHandle = mCameraHandle->ops->add_channel(
841            mCameraHandle->camera_handle, NULL, NULL, this);
842    if (mChannelHandle == 0) {
843        LOGE("add_channel failed");
844        rc = -ENOMEM;
845        pthread_mutex_unlock(&mMutex);
846        return rc;
847    }
848
849    pthread_mutex_unlock(&mMutex);
850    mCameraInitialized = true;
851    mState = INITIALIZED;
852    LOGI("X");
853    return 0;
854
855err1:
856    pthread_mutex_unlock(&mMutex);
857err2:
858    return rc;
859}
860
861/*===========================================================================
862 * FUNCTION   : validateStreamDimensions
863 *
864 * DESCRIPTION: Check if the configuration requested are those advertised
865 *
866 * PARAMETERS :
867 *   @stream_list : streams to be configured
868 *
869 * RETURN     :
870 *
871 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // Only a single input stream is supported per configuration.
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation, validate against the swapped
        // (pre-rotation) dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (BLOB) streams must match one of the picture sizes.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                // ZSL / input / bidirectional streams at exactly full active
                // array size are always accepted.
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Other processed streams fall back to the picture sizes table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
983
984/*==============================================================================
985 * FUNCTION   : isSupportChannelNeeded
986 *
987 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
988 *
989 * PARAMETERS :
990 *   @stream_list : streams to be configured
991 *   @stream_config_info : the config info for streams to be configured
992 *
 * RETURN     : Boolean true/false decision
994 *
995 *==========================================================================*/
996bool QCamera3HardwareInterface::isSupportChannelNeeded(
997        camera3_stream_configuration_t *streamList,
998        cam_stream_size_info_t stream_config_info)
999{
1000    uint32_t i;
1001    bool pprocRequested = false;
1002    /* Check for conditions where PProc pipeline does not have any streams*/
1003    for (i = 0; i < stream_config_info.num_streams; i++) {
1004        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1005                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1006            pprocRequested = true;
1007            break;
1008        }
1009    }
1010
1011    if (pprocRequested == false )
1012        return true;
1013
1014    /* Dummy stream needed if only raw or jpeg streams present */
1015    for (i = 0; i < streamList->num_streams; i++) {
1016        switch(streamList->streams[i]->format) {
1017            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1018            case HAL_PIXEL_FORMAT_RAW10:
1019            case HAL_PIXEL_FORMAT_RAW16:
1020            case HAL_PIXEL_FORMAT_BLOB:
1021                break;
1022            default:
1023                return false;
1024        }
1025    }
1026    return true;
1027}
1028
1029/*==============================================================================
1030 * FUNCTION   : getSensorOutputSize
1031 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1033 *
1034 * PARAMETERS :
1035 *   @sensor_dim : sensor output dimension (output)
1036 *
1037 * RETURN     : int32_t type of status
1038 *              NO_ERROR  -- success
 *              non-zero failure code
1040 *
1041 *==========================================================================*/
1042int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1043{
1044    int32_t rc = NO_ERROR;
1045
1046    cam_dimension_t max_dim = {0, 0};
1047    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1048        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1049            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1050        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1051            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1052    }
1053
1054    clear_metadata_buffer(mParameters);
1055
1056    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1057            max_dim);
1058    if (rc != NO_ERROR) {
1059        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1060        return rc;
1061    }
1062
1063    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1064    if (rc != NO_ERROR) {
1065        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1066        return rc;
1067    }
1068
1069    clear_metadata_buffer(mParameters);
1070    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1071
1072    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1073            mParameters);
1074    if (rc != NO_ERROR) {
1075        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1076        return rc;
1077    }
1078
1079    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1080    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1081
1082    return rc;
1083}
1084
1085/*==============================================================================
1086 * FUNCTION   : enablePowerHint
1087 *
1088 * DESCRIPTION: enable single powerhint for preview and different video modes.
1089 *
1090 * PARAMETERS :
1091 *
1092 * RETURN     : NULL
1093 *
1094 *==========================================================================*/
1095void QCamera3HardwareInterface::enablePowerHint()
1096{
1097    if (!mPowerHintEnabled) {
1098        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1099        mPowerHintEnabled = true;
1100    }
1101}
1102
1103/*==============================================================================
1104 * FUNCTION   : disablePowerHint
1105 *
1106 * DESCRIPTION: disable current powerhint.
1107 *
1108 * PARAMETERS :
1109 *
1110 * RETURN     : NULL
1111 *
1112 *==========================================================================*/
1113void QCamera3HardwareInterface::disablePowerHint()
1114{
1115    if (mPowerHintEnabled) {
1116        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1117        mPowerHintEnabled = false;
1118    }
1119}
1120
1121/*==============================================================================
1122 * FUNCTION   : addToPPFeatureMask
1123 *
1124 * DESCRIPTION: add additional features to pp feature mask based on
1125 *              stream type and usecase
1126 *
1127 * PARAMETERS :
1128 *   @stream_format : stream type for feature mask
1129 *   @stream_idx : stream idx within postprocess_mask list to change
1130 *
1131 * RETURN     : NULL
1132 *
1133 *==========================================================================*/
1134void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1135        uint32_t stream_idx)
1136{
1137    char feature_mask_value[PROPERTY_VALUE_MAX];
1138    uint32_t feature_mask;
1139    int args_converted;
1140    int property_len;
1141
1142    /* Get feature mask from property */
1143    property_len = property_get("persist.camera.hal3.feature",
1144            feature_mask_value, "0");
1145    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1146            (feature_mask_value[1] == 'x')) {
1147        args_converted = sscanf(feature_mask_value, "0x%x", &feature_mask);
1148    } else {
1149        args_converted = sscanf(feature_mask_value, "%d", &feature_mask);
1150    }
1151    if (1 != args_converted) {
1152        feature_mask = 0;
1153        LOGE("Wrong feature mask %s", feature_mask_value);
1154        return;
1155    }
1156
1157    switch (stream_format) {
1158    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1159        /* Add LLVD to pp feature mask only if video hint is enabled */
1160        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1161            mStreamConfigInfo.postprocess_mask[stream_idx]
1162                    |= CAM_QTI_FEATURE_SW_TNR;
1163            LOGH("Added SW TNR to pp feature mask");
1164        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1165            mStreamConfigInfo.postprocess_mask[stream_idx]
1166                    |= CAM_QCOM_FEATURE_LLVD;
1167            LOGH("Added LLVD SeeMore to pp feature mask");
1168        }
1169        break;
1170    }
1171    default:
1172        break;
1173    }
1174    LOGD("PP feature mask %x",
1175            mStreamConfigInfo.postprocess_mask[stream_idx]);
1176}
1177
1178/*==============================================================================
1179 * FUNCTION   : updateFpsInPreviewBuffer
1180 *
1181 * DESCRIPTION: update FPS information in preview buffer.
1182 *
1183 * PARAMETERS :
1184 *   @metadata    : pointer to metadata buffer
1185 *   @frame_number: frame_number to look for in pending buffer list
1186 *
1187 * RETURN     : None
1188 *
1189 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBufferInfo>::iterator j =
            mPendingBuffersMap.mPendingBufferList.begin();
            j != mPendingBuffersMap.mPendingBufferList.end(); j++) {
        QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
        // Only buffers that belong to this frame AND to a preview stream
        // are updated.
        if ((j->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
            IF_META_AVAILABLE(cam_fps_range_t, float_range,
                    CAM_INTF_PARM_FPS_RANGE, metadata) {
                // Write the max fps into the gralloc private handle
                // (UPDATE_REFRESH_RATE) so consumers of the buffer can
                // read the current camera frame rate.
                int32_t cameraFps = float_range->max_fps;
                struct private_handle_t *priv_handle =
                        (struct private_handle_t *)(*(j->buffer));
                setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
            }
        }
    }
}
1212
1213/*===========================================================================
1214 * FUNCTION   : configureStreams
1215 *
1216 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1217 *              and output streams.
1218 *
1219 * PARAMETERS :
1220 *   @stream_list : streams to be configured
1221 *
1222 * RETURN     :
1223 *
1224 *==========================================================================*/
1225int QCamera3HardwareInterface::configureStreams(
1226        camera3_stream_configuration_t *streamList)
1227{
1228    ATRACE_CALL();
1229    int rc = 0;
1230
1231    // Acquire perfLock before configure streams
1232    m_perfLock.lock_acq();
1233    rc = configureStreamsPerfLocked(streamList);
1234    m_perfLock.lock_rel();
1235
1236    return rc;
1237}
1238
1239/*===========================================================================
1240 * FUNCTION   : configureStreamsPerfLocked
1241 *
1242 * DESCRIPTION: configureStreams while perfLock is held.
1243 *
1244 * PARAMETERS :
1245 *   @stream_list : streams to be configured
1246 *
1247 * RETURN     : int32_t type of status
1248 *              NO_ERROR  -- success
 *              non-zero failure code
1250 *==========================================================================*/
1251int QCamera3HardwareInterface::configureStreamsPerfLocked(
1252        camera3_stream_configuration_t *streamList)
1253{
1254    ATRACE_CALL();
1255    int rc = 0;
1256
1257    // Sanity check stream_list
1258    if (streamList == NULL) {
1259        LOGE("NULL stream configuration");
1260        return BAD_VALUE;
1261    }
1262    if (streamList->streams == NULL) {
1263        LOGE("NULL stream list");
1264        return BAD_VALUE;
1265    }
1266
1267    if (streamList->num_streams < 1) {
1268        LOGE("Bad number of streams requested: %d",
1269                streamList->num_streams);
1270        return BAD_VALUE;
1271    }
1272
1273    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1274        LOGE("Maximum number of streams %d exceeded: %d",
1275                MAX_NUM_STREAMS, streamList->num_streams);
1276        return BAD_VALUE;
1277    }
1278
1279    mOpMode = streamList->operation_mode;
1280    LOGD("mOpMode: %d", mOpMode);
1281
1282    /* first invalidate all the steams in the mStreamList
1283     * if they appear again, they will be validated */
1284    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1285            it != mStreamInfo.end(); it++) {
1286        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1287        channel->stop();
1288        (*it)->status = INVALID;
1289    }
1290
1291    if (mRawDumpChannel) {
1292        mRawDumpChannel->stop();
1293        delete mRawDumpChannel;
1294        mRawDumpChannel = NULL;
1295    }
1296
1297    if (mSupportChannel)
1298        mSupportChannel->stop();
1299
1300    if (mAnalysisChannel) {
1301        mAnalysisChannel->stop();
1302    }
1303    if (mMetadataChannel) {
1304        /* If content of mStreamInfo is not 0, there is metadata stream */
1305        mMetadataChannel->stop();
1306    }
1307    if (mChannelHandle) {
1308        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1309                mChannelHandle);
1310        LOGD("stopping channel %d", mChannelHandle);
1311    }
1312
1313    pthread_mutex_lock(&mMutex);
1314
1315    // Check state
1316    switch (mState) {
1317        case INITIALIZED:
1318        case CONFIGURED:
1319        case STARTED:
1320            /* valid state */
1321            break;
1322
1323        case ERROR:
1324            pthread_mutex_unlock(&mMutex);
1325            handleCameraDeviceError();
1326            return -ENODEV;
1327
1328        default:
1329            LOGE("Invalid state %d", mState);
1330            pthread_mutex_unlock(&mMutex);
1331            return -ENODEV;
1332    }
1333
1334    /* Check whether we have video stream */
1335    m_bIs4KVideo = false;
1336    m_bIsVideo = false;
1337    m_bEisSupportedSize = false;
1338    m_bTnrEnabled = false;
1339    bool isZsl = false;
1340    uint32_t videoWidth = 0U;
1341    uint32_t videoHeight = 0U;
1342    size_t rawStreamCnt = 0;
1343    size_t stallStreamCnt = 0;
1344    size_t processedStreamCnt = 0;
1345    // Number of streams on ISP encoder path
1346    size_t numStreamsOnEncoder = 0;
1347    size_t numYuv888OnEncoder = 0;
1348    bool bYuv888OverrideJpeg = false;
1349    cam_dimension_t largeYuv888Size = {0, 0};
1350    cam_dimension_t maxViewfinderSize = {0, 0};
1351    bool bJpegExceeds4K = false;
1352    bool bUseCommonFeatureMask = false;
1353    uint32_t commonFeatureMask = 0;
1354    bool bSmallJpegSize = false;
1355    uint32_t width_ratio;
1356    uint32_t height_ratio;
1357    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1358    camera3_stream_t *inputStream = NULL;
1359    bool isJpeg = false;
1360    cam_dimension_t jpegSize = {0, 0};
1361
1362    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1363
1364    /*EIS configuration*/
1365    bool eisSupported = false;
1366    bool oisSupported = false;
1367    int32_t margin_index = -1;
1368    uint8_t eis_prop_set;
1369    uint32_t maxEisWidth = 0;
1370    uint32_t maxEisHeight = 0;
1371
1372    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1373
1374    size_t count = IS_TYPE_MAX;
1375    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1376    for (size_t i = 0; i < count; i++) {
1377        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1378            eisSupported = true;
1379            margin_index = (int32_t)i;
1380            break;
1381        }
1382    }
1383
1384    count = CAM_OPT_STAB_MAX;
1385    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1386    for (size_t i = 0; i < count; i++) {
1387        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1388            oisSupported = true;
1389            break;
1390        }
1391    }
1392
1393    if (eisSupported) {
1394        maxEisWidth = MAX_EIS_WIDTH;
1395        maxEisHeight = MAX_EIS_HEIGHT;
1396    }
1397
1398    /* EIS setprop control */
1399    char eis_prop[PROPERTY_VALUE_MAX];
1400    memset(eis_prop, 0, sizeof(eis_prop));
1401    property_get("persist.camera.eis.enable", eis_prop, "0");
1402    eis_prop_set = (uint8_t)atoi(eis_prop);
1403
1404    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1405            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1406
1407    /* stream configurations */
1408    for (size_t i = 0; i < streamList->num_streams; i++) {
1409        camera3_stream_t *newStream = streamList->streams[i];
1410        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1411                "height = %d, rotation = %d, usage = 0x%x",
1412                 i, newStream->stream_type, newStream->format,
1413                newStream->width, newStream->height, newStream->rotation,
1414                newStream->usage);
1415        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1416                newStream->stream_type == CAMERA3_STREAM_INPUT){
1417            isZsl = true;
1418        }
1419        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1420            inputStream = newStream;
1421        }
1422
1423        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1424            isJpeg = true;
1425            jpegSize.width = newStream->width;
1426            jpegSize.height = newStream->height;
1427            if (newStream->width > VIDEO_4K_WIDTH ||
1428                    newStream->height > VIDEO_4K_HEIGHT)
1429                bJpegExceeds4K = true;
1430        }
1431
1432        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1433                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1434            m_bIsVideo = true;
1435            videoWidth = newStream->width;
1436            videoHeight = newStream->height;
1437            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1438                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1439                m_bIs4KVideo = true;
1440            }
1441            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1442                                  (newStream->height <= maxEisHeight);
1443        }
1444        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1445                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1446            switch (newStream->format) {
1447            case HAL_PIXEL_FORMAT_BLOB:
1448                stallStreamCnt++;
1449                if (isOnEncoder(maxViewfinderSize, newStream->width,
1450                        newStream->height)) {
1451                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1452                    numStreamsOnEncoder++;
1453                }
1454                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1455                        newStream->width);
1456                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1457                        newStream->height);;
1458                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1459                        "FATAL: max_downscale_factor cannot be zero and so assert");
1460                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1461                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1462                    LOGH("Setting small jpeg size flag to true");
1463                    bSmallJpegSize = true;
1464                }
1465                break;
1466            case HAL_PIXEL_FORMAT_RAW10:
1467            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1468            case HAL_PIXEL_FORMAT_RAW16:
1469                rawStreamCnt++;
1470                break;
1471            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1472                processedStreamCnt++;
1473                if (isOnEncoder(maxViewfinderSize, newStream->width,
1474                        newStream->height)) {
1475                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1476                            IS_USAGE_ZSL(newStream->usage)) {
1477                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1478                    } else {
1479                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1480                    }
1481                    numStreamsOnEncoder++;
1482                }
1483                break;
1484            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1485                processedStreamCnt++;
1486                if (isOnEncoder(maxViewfinderSize, newStream->width,
1487                        newStream->height)) {
1488                    // If Yuv888 size is not greater than 4K, set feature mask
1489                    // to SUPERSET so that it support concurrent request on
1490                    // YUV and JPEG.
1491                    if (newStream->width <= VIDEO_4K_WIDTH &&
1492                            newStream->height <= VIDEO_4K_HEIGHT) {
1493                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1494                    } else {
1495                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1496                    }
1497                    numStreamsOnEncoder++;
1498                    numYuv888OnEncoder++;
1499                    largeYuv888Size.width = newStream->width;
1500                    largeYuv888Size.height = newStream->height;
1501                }
1502                break;
1503            default:
1504                processedStreamCnt++;
1505                if (isOnEncoder(maxViewfinderSize, newStream->width,
1506                        newStream->height)) {
1507                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1508                    numStreamsOnEncoder++;
1509                }
1510                break;
1511            }
1512
1513        }
1514    }
1515
1516    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1517        !m_bIsVideo) {
1518        m_bEisEnable = false;
1519    }
1520
1521    /* Logic to enable/disable TNR based on specific config size/etc.*/
1522    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1523            ((videoWidth == 1920 && videoHeight == 1080) ||
1524            (videoWidth == 1280 && videoHeight == 720)) &&
1525            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1526        m_bTnrEnabled = true;
1527
1528    /* Check if num_streams is sane */
1529    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1530            rawStreamCnt > MAX_RAW_STREAMS ||
1531            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1532        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1533                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1534        pthread_mutex_unlock(&mMutex);
1535        return -EINVAL;
1536    }
1537    /* Check whether we have zsl stream or 4k video case */
1538    if (isZsl && m_bIsVideo) {
1539        LOGE("Currently invalid configuration ZSL&Video!");
1540        pthread_mutex_unlock(&mMutex);
1541        return -EINVAL;
1542    }
1543    /* Check if stream sizes are sane */
1544    if (numStreamsOnEncoder > 2) {
1545        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1546        pthread_mutex_unlock(&mMutex);
1547        return -EINVAL;
1548    } else if (1 < numStreamsOnEncoder){
1549        bUseCommonFeatureMask = true;
1550        LOGH("Multiple streams above max viewfinder size, common mask needed");
1551    }
1552
1553    /* Check if BLOB size is greater than 4k in 4k recording case */
1554    if (m_bIs4KVideo && bJpegExceeds4K) {
1555        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1556        pthread_mutex_unlock(&mMutex);
1557        return -EINVAL;
1558    }
1559
1560    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1561    // the YUV stream's size is greater or equal to the JPEG size, set common
1562    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1563    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1564            jpegSize.width, jpegSize.height) &&
1565            largeYuv888Size.width > jpegSize.width &&
1566            largeYuv888Size.height > jpegSize.height) {
1567        bYuv888OverrideJpeg = true;
1568    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1569        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1570    }
1571
1572    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %x",
1573            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1574            commonFeatureMask);
1575    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1576            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1577
1578    rc = validateStreamDimensions(streamList);
1579    if (rc == NO_ERROR) {
1580        rc = validateStreamRotations(streamList);
1581    }
1582    if (rc != NO_ERROR) {
1583        LOGE("Invalid stream configuration requested!");
1584        pthread_mutex_unlock(&mMutex);
1585        return rc;
1586    }
1587
1588    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1589    camera3_stream_t *jpegStream = NULL;
1590    for (size_t i = 0; i < streamList->num_streams; i++) {
1591        camera3_stream_t *newStream = streamList->streams[i];
1592        LOGH("newStream type = %d, stream format = %d "
1593                "stream size : %d x %d, stream rotation = %d",
1594                 newStream->stream_type, newStream->format,
1595                newStream->width, newStream->height, newStream->rotation);
1596        //if the stream is in the mStreamList validate it
1597        bool stream_exists = false;
1598        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1599                it != mStreamInfo.end(); it++) {
1600            if ((*it)->stream == newStream) {
1601                QCamera3ProcessingChannel *channel =
1602                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1603                stream_exists = true;
1604                if (channel)
1605                    delete channel;
1606                (*it)->status = VALID;
1607                (*it)->stream->priv = NULL;
1608                (*it)->channel = NULL;
1609            }
1610        }
1611        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1612            //new stream
1613            stream_info_t* stream_info;
1614            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1615            if (!stream_info) {
1616               LOGE("Could not allocate stream info");
1617               rc = -ENOMEM;
1618               pthread_mutex_unlock(&mMutex);
1619               return rc;
1620            }
1621            stream_info->stream = newStream;
1622            stream_info->status = VALID;
1623            stream_info->channel = NULL;
1624            mStreamInfo.push_back(stream_info);
1625        }
1626        /* Covers Opaque ZSL and API1 F/W ZSL */
1627        if (IS_USAGE_ZSL(newStream->usage)
1628                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1629            if (zslStream != NULL) {
1630                LOGE("Multiple input/reprocess streams requested!");
1631                pthread_mutex_unlock(&mMutex);
1632                return BAD_VALUE;
1633            }
1634            zslStream = newStream;
1635        }
1636        /* Covers YUV reprocess */
1637        if (inputStream != NULL) {
1638            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1639                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1640                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1641                    && inputStream->width == newStream->width
1642                    && inputStream->height == newStream->height) {
1643                if (zslStream != NULL) {
1644                    /* This scenario indicates multiple YUV streams with same size
1645                     * as input stream have been requested, since zsl stream handle
1646                     * is solely use for the purpose of overriding the size of streams
1647                     * which share h/w streams we will just make a guess here as to
1648                     * which of the stream is a ZSL stream, this will be refactored
1649                     * once we make generic logic for streams sharing encoder output
1650                     */
1651                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1652                }
1653                zslStream = newStream;
1654            }
1655        }
1656        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1657            jpegStream = newStream;
1658        }
1659    }
1660
1661    /* If a zsl stream is set, we know that we have configured at least one input or
1662       bidirectional stream */
1663    if (NULL != zslStream) {
1664        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1665        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1666        mInputStreamInfo.format = zslStream->format;
1667        mInputStreamInfo.usage = zslStream->usage;
1668        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1669                 mInputStreamInfo.dim.width,
1670                mInputStreamInfo.dim.height,
1671                mInputStreamInfo.format, mInputStreamInfo.usage);
1672    }
1673
1674    cleanAndSortStreamInfo();
1675    if (mMetadataChannel) {
1676        delete mMetadataChannel;
1677        mMetadataChannel = NULL;
1678    }
1679    if (mSupportChannel) {
1680        delete mSupportChannel;
1681        mSupportChannel = NULL;
1682    }
1683
1684    if (mAnalysisChannel) {
1685        delete mAnalysisChannel;
1686        mAnalysisChannel = NULL;
1687    }
1688
1689    if (mDummyBatchChannel) {
1690        delete mDummyBatchChannel;
1691        mDummyBatchChannel = NULL;
1692    }
1693
1694    //Create metadata channel and initialize it
1695    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1696                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1697                    &padding_info, CAM_QCOM_FEATURE_NONE, this);
1698    if (mMetadataChannel == NULL) {
1699        LOGE("failed to allocate metadata channel");
1700        rc = -ENOMEM;
1701        pthread_mutex_unlock(&mMutex);
1702        return rc;
1703    }
1704    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1705    if (rc < 0) {
1706        LOGE("metadata channel initialization failed");
1707        delete mMetadataChannel;
1708        mMetadataChannel = NULL;
1709        pthread_mutex_unlock(&mMutex);
1710        return rc;
1711    }
1712
1713    // Create analysis stream all the time, even when h/w support is not available
1714    {
1715        mAnalysisChannel = new QCamera3SupportChannel(
1716                mCameraHandle->camera_handle,
1717                mChannelHandle,
1718                mCameraHandle->ops,
1719                &gCamCapability[mCameraId]->analysis_padding_info,
1720                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1721                CAM_STREAM_TYPE_ANALYSIS,
1722                &gCamCapability[mCameraId]->analysis_recommended_res,
1723                (gCamCapability[mCameraId]->analysis_recommended_format
1724                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1725                : CAM_FORMAT_YUV_420_NV21),
1726                gCamCapability[mCameraId]->hw_analysis_supported,
1727                this,
1728                0); // force buffer count to 0
1729        if (!mAnalysisChannel) {
1730            LOGE("H/W Analysis channel cannot be created");
1731            pthread_mutex_unlock(&mMutex);
1732            return -ENOMEM;
1733        }
1734    }
1735
1736    bool isRawStreamRequested = false;
1737    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1738    /* Allocate channel objects for the requested streams */
1739    for (size_t i = 0; i < streamList->num_streams; i++) {
1740        camera3_stream_t *newStream = streamList->streams[i];
1741        uint32_t stream_usage = newStream->usage;
1742        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1743        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1744        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1745                || IS_USAGE_ZSL(newStream->usage)) &&
1746            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1747            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1748            if (bUseCommonFeatureMask) {
1749                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1750                        commonFeatureMask;
1751            } else {
1752                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1753                        CAM_QCOM_FEATURE_NONE;
1754            }
1755
1756        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1757                LOGH("Input stream configured, reprocess config");
1758        } else {
1759            //for non zsl streams find out the format
1760            switch (newStream->format) {
1761            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1762            {
1763                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1764                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1765                /* add additional features to pp feature mask */
1766                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1767                        mStreamConfigInfo.num_streams);
1768
1769                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1770                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1771                                CAM_STREAM_TYPE_VIDEO;
1772                    if (m_bTnrEnabled && m_bTnrVideo) {
1773                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1774                            CAM_QCOM_FEATURE_CPP_TNR;
1775                    }
1776                } else {
1777                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1778                            CAM_STREAM_TYPE_PREVIEW;
1779                    if (m_bTnrEnabled && m_bTnrPreview) {
1780                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1781                                CAM_QCOM_FEATURE_CPP_TNR;
1782                    }
1783                    padding_info.width_padding = mSurfaceStridePadding;
1784                    padding_info.height_padding = CAM_PAD_TO_2;
1785                }
1786                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1787                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1788                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1789                            newStream->height;
1790                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1791                            newStream->width;
1792                }
1793            }
1794            break;
1795            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1796                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1797                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1798                    if (bUseCommonFeatureMask)
1799                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1800                                commonFeatureMask;
1801                    else
1802                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1803                                CAM_QCOM_FEATURE_NONE;
1804                } else {
1805                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1806                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1807                }
1808            break;
1809            case HAL_PIXEL_FORMAT_BLOB:
1810                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1811                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1812                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1813                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1814                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1815                } else {
1816                    if (bUseCommonFeatureMask &&
1817                            isOnEncoder(maxViewfinderSize, newStream->width,
1818                            newStream->height)) {
1819                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1820                    } else {
1821                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1822                    }
1823                }
1824                if (isZsl) {
1825                    if (zslStream) {
1826                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1827                                (int32_t)zslStream->width;
1828                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1829                                (int32_t)zslStream->height;
1830                    } else {
1831                        LOGE("Error, No ZSL stream identified");
1832                        pthread_mutex_unlock(&mMutex);
1833                        return -EINVAL;
1834                    }
1835                } else if (m_bIs4KVideo) {
1836                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1837                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1838                } else if (bYuv888OverrideJpeg) {
1839                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1840                            (int32_t)largeYuv888Size.width;
1841                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1842                            (int32_t)largeYuv888Size.height;
1843                }
1844                break;
1845            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1846            case HAL_PIXEL_FORMAT_RAW16:
1847            case HAL_PIXEL_FORMAT_RAW10:
1848                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1849                isRawStreamRequested = true;
1850                break;
1851            default:
1852                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1853                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1854                break;
1855            }
1856        }
1857
1858        if (newStream->priv == NULL) {
1859            //New stream, construct channel
1860            switch (newStream->stream_type) {
1861            case CAMERA3_STREAM_INPUT:
1862                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1863                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1864                break;
1865            case CAMERA3_STREAM_BIDIRECTIONAL:
1866                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1867                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1868                break;
1869            case CAMERA3_STREAM_OUTPUT:
1870                /* For video encoding stream, set read/write rarely
1871                 * flag so that they may be set to un-cached */
1872                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1873                    newStream->usage |=
1874                         (GRALLOC_USAGE_SW_READ_RARELY |
1875                         GRALLOC_USAGE_SW_WRITE_RARELY |
1876                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1877                else if (IS_USAGE_ZSL(newStream->usage))
1878                {
1879                    LOGD("ZSL usage flag skipping");
1880                }
1881                else if (newStream == zslStream
1882                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1883                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1884                } else
1885                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1886                break;
1887            default:
1888                LOGE("Invalid stream_type %d", newStream->stream_type);
1889                break;
1890            }
1891
1892            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1893                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1894                QCamera3ProcessingChannel *channel = NULL;
1895                switch (newStream->format) {
1896                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1897                    if ((newStream->usage &
1898                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1899                            (streamList->operation_mode ==
1900                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1901                    ) {
1902                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1903                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1904                                &gCamCapability[mCameraId]->padding_info,
1905                                this,
1906                                newStream,
1907                                (cam_stream_type_t)
1908                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1909                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1910                                mMetadataChannel,
1911                                0); //heap buffers are not required for HFR video channel
1912                        if (channel == NULL) {
1913                            LOGE("allocation of channel failed");
1914                            pthread_mutex_unlock(&mMutex);
1915                            return -ENOMEM;
1916                        }
1917                        //channel->getNumBuffers() will return 0 here so use
1918                        //MAX_INFLIGH_HFR_REQUESTS
1919                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1920                        newStream->priv = channel;
1921                        LOGI("num video buffers in HFR mode: %d",
1922                                 MAX_INFLIGHT_HFR_REQUESTS);
1923                    } else {
1924                        /* Copy stream contents in HFR preview only case to create
1925                         * dummy batch channel so that sensor streaming is in
1926                         * HFR mode */
1927                        if (!m_bIsVideo && (streamList->operation_mode ==
1928                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1929                            mDummyBatchStream = *newStream;
1930                        }
1931                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1932                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1933                                &gCamCapability[mCameraId]->padding_info,
1934                                this,
1935                                newStream,
1936                                (cam_stream_type_t)
1937                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1938                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1939                                mMetadataChannel,
1940                                MAX_INFLIGHT_REQUESTS);
1941                        if (channel == NULL) {
1942                            LOGE("allocation of channel failed");
1943                            pthread_mutex_unlock(&mMutex);
1944                            return -ENOMEM;
1945                        }
1946                        newStream->max_buffers = channel->getNumBuffers();
1947                        newStream->priv = channel;
1948                    }
1949                    break;
1950                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1951                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1952                            mChannelHandle,
1953                            mCameraHandle->ops, captureResultCb,
1954                            &padding_info,
1955                            this,
1956                            newStream,
1957                            (cam_stream_type_t)
1958                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1959                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1960                            mMetadataChannel);
1961                    if (channel == NULL) {
1962                        LOGE("allocation of YUV channel failed");
1963                        pthread_mutex_unlock(&mMutex);
1964                        return -ENOMEM;
1965                    }
1966                    newStream->max_buffers = channel->getNumBuffers();
1967                    newStream->priv = channel;
1968                    break;
1969                }
1970                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1971                case HAL_PIXEL_FORMAT_RAW16:
1972                case HAL_PIXEL_FORMAT_RAW10:
1973                    mRawChannel = new QCamera3RawChannel(
1974                            mCameraHandle->camera_handle, mChannelHandle,
1975                            mCameraHandle->ops, captureResultCb,
1976                            &padding_info,
1977                            this, newStream, CAM_QCOM_FEATURE_NONE,
1978                            mMetadataChannel,
1979                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1980                    if (mRawChannel == NULL) {
1981                        LOGE("allocation of raw channel failed");
1982                        pthread_mutex_unlock(&mMutex);
1983                        return -ENOMEM;
1984                    }
1985                    newStream->max_buffers = mRawChannel->getNumBuffers();
1986                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1987                    break;
1988                case HAL_PIXEL_FORMAT_BLOB:
1989                    // Max live snapshot inflight buffer is 1. This is to mitigate
1990                    // frame drop issues for video snapshot. The more buffers being
1991                    // allocated, the more frame drops there are.
1992                    mPictureChannel = new QCamera3PicChannel(
1993                            mCameraHandle->camera_handle, mChannelHandle,
1994                            mCameraHandle->ops, captureResultCb,
1995                            &padding_info, this, newStream,
1996                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1997                            m_bIs4KVideo, isZsl, mMetadataChannel,
1998                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1999                    if (mPictureChannel == NULL) {
2000                        LOGE("allocation of channel failed");
2001                        pthread_mutex_unlock(&mMutex);
2002                        return -ENOMEM;
2003                    }
2004                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2005                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2006                    mPictureChannel->overrideYuvSize(
2007                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2008                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2009                    break;
2010
2011                default:
2012                    LOGE("not a supported format 0x%x", newStream->format);
2013                    break;
2014                }
2015            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2016                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2017            } else {
2018                LOGE("Error, Unknown stream type");
2019                pthread_mutex_unlock(&mMutex);
2020                return -EINVAL;
2021            }
2022
2023            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2024            if (channel != NULL && channel->isUBWCEnabled()) {
2025                cam_format_t fmt = channel->getStreamDefaultFormat(
2026                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2027                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2028                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2029                }
2030            }
2031
2032            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2033                    it != mStreamInfo.end(); it++) {
2034                if ((*it)->stream == newStream) {
2035                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2036                    break;
2037                }
2038            }
2039        } else {
2040            // Channel already exists for this stream
2041            // Do nothing for now
2042        }
2043        padding_info = gCamCapability[mCameraId]->padding_info;
2044
2045        /* Do not add entries for input stream in metastream info
2046         * since there is no real stream associated with it
2047         */
2048        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2049            mStreamConfigInfo.num_streams++;
2050    }
2051
2052    //RAW DUMP channel
2053    if (mEnableRawDump && isRawStreamRequested == false){
2054        cam_dimension_t rawDumpSize;
2055        rawDumpSize = getMaxRawSize(mCameraId);
2056        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2057                                  mChannelHandle,
2058                                  mCameraHandle->ops,
2059                                  rawDumpSize,
2060                                  &padding_info,
2061                                  this, CAM_QCOM_FEATURE_NONE);
2062        if (!mRawDumpChannel) {
2063            LOGE("Raw Dump channel cannot be created");
2064            pthread_mutex_unlock(&mMutex);
2065            return -ENOMEM;
2066        }
2067    }
2068
2069
2070    if (mAnalysisChannel) {
2071        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2072                gCamCapability[mCameraId]->analysis_recommended_res;
2073        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2074                CAM_STREAM_TYPE_ANALYSIS;
2075        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2076                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2077        mStreamConfigInfo.num_streams++;
2078    }
2079
2080    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2081        mSupportChannel = new QCamera3SupportChannel(
2082                mCameraHandle->camera_handle,
2083                mChannelHandle,
2084                mCameraHandle->ops,
2085                &gCamCapability[mCameraId]->padding_info,
2086                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2087                CAM_STREAM_TYPE_CALLBACK,
2088                &QCamera3SupportChannel::kDim,
2089                CAM_FORMAT_YUV_420_NV21,
2090                gCamCapability[mCameraId]->hw_analysis_supported,
2091                this);
2092        if (!mSupportChannel) {
2093            LOGE("dummy channel cannot be created");
2094            pthread_mutex_unlock(&mMutex);
2095            return -ENOMEM;
2096        }
2097    }
2098
2099    if (mSupportChannel) {
2100        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2101                QCamera3SupportChannel::kDim;
2102        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2103                CAM_STREAM_TYPE_CALLBACK;
2104        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2105                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2106        mStreamConfigInfo.num_streams++;
2107    }
2108
2109    if (mRawDumpChannel) {
2110        cam_dimension_t rawSize;
2111        rawSize = getMaxRawSize(mCameraId);
2112        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2113                rawSize;
2114        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2115                CAM_STREAM_TYPE_RAW;
2116        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2117                CAM_QCOM_FEATURE_NONE;
2118        mStreamConfigInfo.num_streams++;
2119    }
2120    /* In HFR mode, if video stream is not added, create a dummy channel so that
2121     * ISP can create a batch mode even for preview only case. This channel is
2122     * never 'start'ed (no stream-on), it is only 'initialized'  */
2123    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2124            !m_bIsVideo) {
2125        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2126                mChannelHandle,
2127                mCameraHandle->ops, captureResultCb,
2128                &gCamCapability[mCameraId]->padding_info,
2129                this,
2130                &mDummyBatchStream,
2131                CAM_STREAM_TYPE_VIDEO,
2132                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2133                mMetadataChannel);
2134        if (NULL == mDummyBatchChannel) {
2135            LOGE("creation of mDummyBatchChannel failed."
2136                    "Preview will use non-hfr sensor mode ");
2137        }
2138    }
2139    if (mDummyBatchChannel) {
2140        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2141                mDummyBatchStream.width;
2142        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2143                mDummyBatchStream.height;
2144        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2145                CAM_STREAM_TYPE_VIDEO;
2146        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2147                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2148        mStreamConfigInfo.num_streams++;
2149    }
2150
2151    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2152    mStreamConfigInfo.buffer_info.max_buffers =
2153            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2154
2155    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
2156    for (pendingRequestIterator i = mPendingRequestsList.begin();
2157            i != mPendingRequestsList.end();) {
2158        i = erasePendingRequest(i);
2159    }
2160    mPendingFrameDropList.clear();
2161    // Initialize/Reset the pending buffers list
2162    mPendingBuffersMap.num_buffers = 0;
2163    mPendingBuffersMap.mPendingBufferList.clear();
2164    mPendingReprocessResultList.clear();
2165
2166    mCurJpegMeta.clear();
2167    //Get min frame duration for this streams configuration
2168    deriveMinFrameDuration();
2169
2170    // Update state
2171    mState = CONFIGURED;
2172
2173    pthread_mutex_unlock(&mMutex);
2174
2175    return rc;
2176}
2177
2178/*===========================================================================
2179 * FUNCTION   : validateCaptureRequest
2180 *
2181 * DESCRIPTION: validate a capture request from camera service
2182 *
2183 * PARAMETERS :
2184 *   @request : request from framework to process
2185 *
2186 * RETURN     : BAD_VALUE if the request or any of its buffers is invalid;
2186 *              NO_ERROR otherwise
2187 *
2188 *==========================================================================*/
2189int QCamera3HardwareInterface::validateCaptureRequest(
2190                    camera3_capture_request_t *request)
2191{
2192    ssize_t idx = 0;
2193    const camera3_stream_buffer_t *b;
2194    CameraMetadata meta;
2195
2196    /* Sanity check the request */
2197    if (request == NULL) {
2198        LOGE("NULL capture request");
2199        return BAD_VALUE;
2200    }
2201
2202    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2203        /*settings cannot be null for the first request*/
2204        return BAD_VALUE;
2205    }
2206
2207    uint32_t frameNumber = request->frame_number;
2208    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2209        LOGE("Request %d: No output buffers provided!",
2210                __FUNCTION__, frameNumber);
2211        return BAD_VALUE;
2212    }
2213    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2214        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2215                 request->num_output_buffers, MAX_NUM_STREAMS);
2216        return BAD_VALUE;
2217    }
2218    if (request->input_buffer != NULL) {
2219        b = request->input_buffer;
2220        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2221            LOGE("Request %d: Buffer %ld: Status not OK!",
2222                     frameNumber, (long)idx);
2223            return BAD_VALUE;
2224        }
2225        if (b->release_fence != -1) {
2226            LOGE("Request %d: Buffer %ld: Has a release fence!",
2227                     frameNumber, (long)idx);
2228            return BAD_VALUE;
2229        }
2230        if (b->buffer == NULL) {
2231            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2232                     frameNumber, (long)idx);
2233            return BAD_VALUE;
2234        }
2235    }
2236
2237    // Validate all buffers
2238    b = request->output_buffers;
2239    do {
2240        QCamera3ProcessingChannel *channel =
2241                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2242        if (channel == NULL) {
2243            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2244                     frameNumber, (long)idx);
2245            return BAD_VALUE;
2246        }
2247        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2248            LOGE("Request %d: Buffer %ld: Status not OK!",
2249                     frameNumber, (long)idx);
2250            return BAD_VALUE;
2251        }
2252        if (b->release_fence != -1) {
2253            LOGE("Request %d: Buffer %ld: Has a release fence!",
2254                     frameNumber, (long)idx);
2255            return BAD_VALUE;
2256        }
2257        if (b->buffer == NULL) {
2258            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2259                     frameNumber, (long)idx);
2260            return BAD_VALUE;
2261        }
2262        if (*(b->buffer) == NULL) {
2263            LOGE("Request %d: Buffer %ld: NULL private handle!",
2264                     frameNumber, (long)idx);
2265            return BAD_VALUE;
2266        }
2267        idx++;
2268        b = request->output_buffers + idx;
2269    } while (idx < (ssize_t)request->num_output_buffers);
2270
2271    return NO_ERROR;
2272}
2273
2274/*===========================================================================
2275 * FUNCTION   : deriveMinFrameDuration
2276 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2278 *              on currently configured streams.
2279 *
2280 * PARAMETERS : NONE
2281 *
2282 * RETURN     : NONE
2283 *
2284 *==========================================================================*/
2285void QCamera3HardwareInterface::deriveMinFrameDuration()
2286{
2287    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2288
2289    maxJpegDim = 0;
2290    maxProcessedDim = 0;
2291    maxRawDim = 0;
2292
2293    // Figure out maximum jpeg, processed, and raw dimensions
2294    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2295        it != mStreamInfo.end(); it++) {
2296
2297        // Input stream doesn't have valid stream_type
2298        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2299            continue;
2300
2301        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2302        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2303            if (dimension > maxJpegDim)
2304                maxJpegDim = dimension;
2305        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2306                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2307                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2308            if (dimension > maxRawDim)
2309                maxRawDim = dimension;
2310        } else {
2311            if (dimension > maxProcessedDim)
2312                maxProcessedDim = dimension;
2313        }
2314    }
2315
2316    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2317            MAX_SIZES_CNT);
2318
2319    //Assume all jpeg dimensions are in processed dimensions.
2320    if (maxJpegDim > maxProcessedDim)
2321        maxProcessedDim = maxJpegDim;
2322    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2323    if (maxProcessedDim > maxRawDim) {
2324        maxRawDim = INT32_MAX;
2325
2326        for (size_t i = 0; i < count; i++) {
2327            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2328                    gCamCapability[mCameraId]->raw_dim[i].height;
2329            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2330                maxRawDim = dimension;
2331        }
2332    }
2333
2334    //Find minimum durations for processed, jpeg, and raw
2335    for (size_t i = 0; i < count; i++) {
2336        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2337                gCamCapability[mCameraId]->raw_dim[i].height) {
2338            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2339            break;
2340        }
2341    }
2342    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2343    for (size_t i = 0; i < count; i++) {
2344        if (maxProcessedDim ==
2345                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2346                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2347            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2348            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2349            break;
2350        }
2351    }
2352}
2353
2354/*===========================================================================
2355 * FUNCTION   : getMinFrameDuration
2356 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2358 *              and current request configuration.
2359 *
 * PARAMETERS : @request: request sent by the framework
2361 *
 * RETURN     : min frame duration for a particular request
2363 *
2364 *==========================================================================*/
2365int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2366{
2367    bool hasJpegStream = false;
2368    bool hasRawStream = false;
2369    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2370        const camera3_stream_t *stream = request->output_buffers[i].stream;
2371        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2372            hasJpegStream = true;
2373        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2374                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2375                stream->format == HAL_PIXEL_FORMAT_RAW16)
2376            hasRawStream = true;
2377    }
2378
2379    if (!hasJpegStream)
2380        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2381    else
2382        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2383}
2384
2385/*===========================================================================
2386 * FUNCTION   : handlePendingReprocResults
2387 *
2388 * DESCRIPTION: check and notify on any pending reprocess results
2389 *
2390 * PARAMETERS :
2391 *   @frame_number   : Pending request frame number
2392 *
2393 * RETURN     : int32_t type of status
2394 *              NO_ERROR  -- success
2395 *              none-zero failure code
2396 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result recorded for this frame number.
    // At most one entry is delivered per call; the loop breaks after a match.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until the
            // reprocess output became available. The notify must precede
            // process_capture_result per the HAL3 callback contract.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its settings and input
            // buffer can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build a complete result: the saved output buffer from
                    // the reprocess entry plus metadata from the request.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Remove the satisfied request; erase returns the next
                    // iterator but we break out immediately.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Drop the delivered reprocess entry; iterator j is invalidated,
            // so we must break rather than continue the outer loop.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2435
2436/*===========================================================================
2437 * FUNCTION   : handleBatchMetadata
2438 *
2439 * DESCRIPTION: Handles metadata buffer callback in batch mode
2440 *
2441 * PARAMETERS : @metadata_buf: metadata buffer
2442 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2443 *                 the meta buf in this method
2444 *
2445 * RETURN     :
2446 *
2447 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Invalid metadata still goes through the loop below (once) so that
        // handleMetadataWithLock can account for pipeline depth.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // mPendingBatchMap maps the batch's last frame number to its first;
        // the diff is the number of requests folded into this batch.
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is complete once its final metadata arrives; drop the
        // bookkeeping entry.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Emit one synthesized metadata callback per request in the batch.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Rewrite the shared metadata buffer in place with the
                    // interpolated urgent frame number for this iteration.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Interpolate the per-frame timestamp backwards from the last
                // frame's capture time using the HFR video frame rate.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        // Pass false: the single underlying buffer is reused across all
        // iterations and released once, below.
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2596
2597/*===========================================================================
2598 * FUNCTION   : handleMetadataWithLock
2599 *
2600 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2601 *
2602 * PARAMETERS : @metadata_buf: metadata buffer
2603 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2604 *                 the meta buf in this method
2605 *
2606 * RETURN     :
2607 *
2608 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();
    // Caller must hold mMutex (see function name); this method sends urgent
    // (partial) results, shutter notifies and final results to the framework.
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop stays in scope past this block and is consulted later
    // when deciding whether to raise CAMERA3_MSG_ERROR_BUFFER per stream.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                 *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        // Jump to the common exit so pipeline depth is still bumped.
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
           urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // Older non-reprocess requests with no partial result yet mean
            // an urgent metadata callback was missed; log but continue.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                         i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                // The translated metadata was allocated for this callback
                // only; release it now that the framework has copied it.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                    if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        // so the buffer status can be marked ERROR below.
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else {
                // A missed metadata callback is unrecoverable for this
                // pipeline: flag the HAL as errored and bail out.
                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
                mState = ERROR;
                goto done_metadata;
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Ownership of metadata_buf passes to the channel here;
                    // that is why it is not buf-done below when internalPproc.
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc, i->fwkCacMode);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the output buffers that have already been filled for this
        // request; they are returned together with the metadata result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): operator new[] throws on failure rather than
            // returning NULL, so this check is effectively dead; left as-is.
            if (!result_buffers) {
                LOGE("Fatal error: out of memory");
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer ERROR if a frame drop was recorded for
                    // this stream/frame combination earlier.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                   frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the global pending-buffer map
                    // now that it is being handed back to the framework.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        LOGD("Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!",
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                  result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // No filled buffers yet: send a metadata-only result.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                  result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        // erasePendingRequest returns the next valid iterator.
        i = erasePendingRequest(i);

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every still-pending request has now seen one more metadata pass.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
2923
2924/*===========================================================================
2925 * FUNCTION   : hdrPlusPerfLock
2926 *
2927 * DESCRIPTION: perf lock for HDR+ using custom intent
2928 *
2929 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2930 *
2931 * RETURN     : None
2932 *
2933 *==========================================================================*/
2934void QCamera3HardwareInterface::hdrPlusPerfLock(
2935        mm_camera_super_buf_t *metadata_buf)
2936{
2937    if (NULL == metadata_buf) {
2938        LOGE("metadata_buf is NULL");
2939        return;
2940    }
2941    metadata_buffer_t *metadata =
2942            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2943    int32_t *p_frame_number_valid =
2944            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2945    uint32_t *p_frame_number =
2946            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2947
2948    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
2949        LOGE("%s: Invalid metadata", __func__);
2950        return;
2951    }
2952
2953    //acquire perf lock for 5 sec after the last HDR frame is captured
2954    if (*p_frame_number_valid) {
2955        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2956            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2957        }
2958    }
2959
2960    //release lock after perf lock timer is expired. If lock is already released,
2961    //isTimerReset returns false
2962    if (m_perfLock.isTimerReset()) {
2963        mLastCustIntentFrmNum = -1;
2964        m_perfLock.lock_rel_timed();
2965    }
2966}
2967
2968/*===========================================================================
2969 * FUNCTION   : handleInputBufferWithLock
2970 *
2971 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
2972 *
2973 * PARAMETERS : @frame_number: frame number of the input buffer
2974 *
2975 * RETURN     :
2976 *
2977 *==========================================================================*/
2978void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
2979{
2980    ATRACE_CALL();
2981    pendingRequestIterator i = mPendingRequestsList.begin();
2982    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2983        i++;
2984    }
2985    if (i != mPendingRequestsList.end() && i->input_buffer) {
2986        //found the right request
2987        if (!i->shutter_notified) {
2988            CameraMetadata settings;
2989            camera3_notify_msg_t notify_msg;
2990            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2991            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2992            if(i->settings) {
2993                settings = i->settings;
2994                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2995                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2996                } else {
2997                    LOGE("No timestamp in input settings! Using current one.");
2998                }
2999            } else {
3000                LOGE("Input settings missing!");
3001            }
3002
3003            notify_msg.type = CAMERA3_MSG_SHUTTER;
3004            notify_msg.message.shutter.frame_number = frame_number;
3005            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3006            mCallbackOps->notify(mCallbackOps, &notify_msg);
3007            i->shutter_notified = true;
3008            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3009                        i->frame_number, notify_msg.message.shutter.timestamp);
3010        }
3011
3012        if (i->input_buffer->release_fence != -1) {
3013           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3014           close(i->input_buffer->release_fence);
3015           if (rc != OK) {
3016               LOGE("input buffer sync wait failed %d", rc);
3017           }
3018        }
3019
3020        camera3_capture_result result;
3021        memset(&result, 0, sizeof(camera3_capture_result));
3022        result.frame_number = frame_number;
3023        result.result = i->settings;
3024        result.input_buffer = i->input_buffer;
3025        result.partial_result = PARTIAL_RESULT_COUNT;
3026
3027        mCallbackOps->process_capture_result(mCallbackOps, &result);
3028        LOGD("Input request metadata and input buffer frame_number = %u",
3029                        i->frame_number);
3030        i = erasePendingRequest(i);
3031    } else {
3032        LOGE("Could not find input request for frame number %d", frame_number);
3033    }
3034}
3035
3036/*===========================================================================
3037 * FUNCTION   : handleBufferWithLock
3038 *
3039 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3040 *
3041 * PARAMETERS : @buffer: image buffer for the callback
3042 *              @frame_number: frame number of the image buffer
3043 *
3044 * RETURN     :
3045 *
3046 *==========================================================================*/
3047void QCamera3HardwareInterface::handleBufferWithLock(
3048    camera3_stream_buffer_t *buffer, uint32_t frame_number)
3049{
3050    ATRACE_CALL();
3051    if (mFlushPerf) {
3052        // flush case
3053        //go through the pending buffers and mark them as returned.
3054        LOGD("Handle buffer with lock called during flush");
3055        for (List<PendingBufferInfo>::iterator i =
3056                mPendingBuffersMap.mPendingBufferList.begin();
3057                i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3058            if (i->buffer == buffer->buffer) {
3059                mPendingBuffersMap.num_buffers--;
3060                LOGD("Found Frame buffer, updated num_buffers %d, ",
3061                         mPendingBuffersMap.num_buffers);
3062                break;
3063            }
3064        }
3065        if (mPendingBuffersMap.num_buffers == 0) {
3066            //signal the flush()
3067            LOGD("All buffers returned to HAL continue flush");
3068            pthread_cond_signal(&mBuffersCond);
3069        }
3070        return;
3071    }
3072    /* Nothing to be done during error state */
3073    if ((ERROR == mState) || (DEINIT == mState)) {
3074        return;
3075    }
3076
3077    //not in flush
3078    // If the frame number doesn't exist in the pending request list,
3079    // directly send the buffer to the frameworks, and update pending buffers map
3080    // Otherwise, book-keep the buffer.
3081    pendingRequestIterator i = mPendingRequestsList.begin();
3082    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3083        i++;
3084    }
3085    if (i == mPendingRequestsList.end()) {
3086        // Verify all pending requests frame_numbers are greater
3087        for (pendingRequestIterator j = mPendingRequestsList.begin();
3088                j != mPendingRequestsList.end(); j++) {
3089            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3090                LOGW("Error: pending live frame number %d is smaller than %d",
3091                         j->frame_number, frame_number);
3092            }
3093        }
3094        camera3_capture_result_t result;
3095        memset(&result, 0, sizeof(camera3_capture_result_t));
3096        result.result = NULL;
3097        result.frame_number = frame_number;
3098        result.num_output_buffers = 1;
3099        result.partial_result = 0;
3100        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3101                m != mPendingFrameDropList.end(); m++) {
3102            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3103            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3104            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3105                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3106                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3107                         frame_number, streamID);
3108                m = mPendingFrameDropList.erase(m);
3109                break;
3110            }
3111        }
3112        result.output_buffers = buffer;
3113        LOGH("result frame_number = %d, buffer = %p",
3114                 frame_number, buffer->buffer);
3115
3116        for (List<PendingBufferInfo>::iterator k =
3117                mPendingBuffersMap.mPendingBufferList.begin();
3118                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
3119            if (k->buffer == buffer->buffer) {
3120                LOGD("Found Frame buffer, take it out from list");
3121
3122                mPendingBuffersMap.num_buffers--;
3123                k = mPendingBuffersMap.mPendingBufferList.erase(k);
3124                break;
3125            }
3126        }
3127        LOGD("mPendingBuffersMap.num_buffers = %d",
3128             mPendingBuffersMap.num_buffers);
3129
3130        mCallbackOps->process_capture_result(mCallbackOps, &result);
3131    } else {
3132        if (i->input_buffer) {
3133            CameraMetadata settings;
3134            camera3_notify_msg_t notify_msg;
3135            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3136            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3137            if(i->settings) {
3138                settings = i->settings;
3139                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3140                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3141                } else {
3142                    LOGW("No timestamp in input settings! Using current one.");
3143                }
3144            } else {
3145                LOGE("Input settings missing!");
3146            }
3147
3148            notify_msg.type = CAMERA3_MSG_SHUTTER;
3149            notify_msg.message.shutter.frame_number = frame_number;
3150            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3151
3152            if (i->input_buffer->release_fence != -1) {
3153               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3154               close(i->input_buffer->release_fence);
3155               if (rc != OK) {
3156                   LOGE("input buffer sync wait failed %d", rc);
3157               }
3158            }
3159
3160            for (List<PendingBufferInfo>::iterator k =
3161                    mPendingBuffersMap.mPendingBufferList.begin();
3162                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
3163                if (k->buffer == buffer->buffer) {
3164                    LOGD("Found Frame buffer, take it out from list");
3165
3166                    mPendingBuffersMap.num_buffers--;
3167                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
3168                    break;
3169                }
3170            }
3171            LOGD("mPendingBuffersMap.num_buffers = %d",
3172                 mPendingBuffersMap.num_buffers);
3173
3174            bool notifyNow = true;
3175            for (pendingRequestIterator j = mPendingRequestsList.begin();
3176                    j != mPendingRequestsList.end(); j++) {
3177                if (j->frame_number < frame_number) {
3178                    notifyNow = false;
3179                    break;
3180                }
3181            }
3182
3183            if (notifyNow) {
3184                camera3_capture_result result;
3185                memset(&result, 0, sizeof(camera3_capture_result));
3186                result.frame_number = frame_number;
3187                result.result = i->settings;
3188                result.input_buffer = i->input_buffer;
3189                result.num_output_buffers = 1;
3190                result.output_buffers = buffer;
3191                result.partial_result = PARTIAL_RESULT_COUNT;
3192
3193                mCallbackOps->notify(mCallbackOps, &notify_msg);
3194                mCallbackOps->process_capture_result(mCallbackOps, &result);
3195                LOGD("Notify reprocess now %d!", frame_number);
3196                i = erasePendingRequest(i);
3197            } else {
3198                // Cache reprocess result for later
3199                PendingReprocessResult pendingResult;
3200                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3201                pendingResult.notify_msg = notify_msg;
3202                pendingResult.buffer = *buffer;
3203                pendingResult.frame_number = frame_number;
3204                mPendingReprocessResultList.push_back(pendingResult);
3205                LOGD("Cache reprocess result %d!", frame_number);
3206            }
3207        } else {
3208            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3209                j != i->buffers.end(); j++) {
3210                if (j->stream == buffer->stream) {
3211                    if (j->buffer != NULL) {
3212                        LOGE("Error: buffer is already set");
3213                    } else {
3214                        j->buffer = (camera3_stream_buffer_t *)malloc(
3215                            sizeof(camera3_stream_buffer_t));
3216                        *(j->buffer) = *buffer;
3217                        LOGH("cache buffer %p at result frame_number %d",
3218                             buffer, frame_number);
3219                    }
3220                }
3221            }
3222        }
3223    }
3224}
3225
3226/*===========================================================================
3227 * FUNCTION   : unblockRequestIfNecessary
3228 *
3229 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3230 *              that mMutex is held when this function is called.
3231 *
3232 * PARAMETERS :
3233 *
3234 * RETURN     :
3235 *
3236 *==========================================================================*/
3237void QCamera3HardwareInterface::unblockRequestIfNecessary()
3238{
3239   // Unblock process_capture_request
3240   pthread_cond_signal(&mRequestCond);
3241}
3242
3243
3244/*===========================================================================
3245 * FUNCTION   : processCaptureRequest
3246 *
3247 * DESCRIPTION: process a capture request from camera service
3248 *
3249 * PARAMETERS :
3250 *   @request : request from framework to process
3251 *
3252 * RETURN     :
3253 *
3254 *==========================================================================*/
3255int QCamera3HardwareInterface::processCaptureRequest(
3256                    camera3_capture_request_t *request)
3257{
3258    ATRACE_CALL();
3259    int rc = NO_ERROR;
3260    int32_t request_id;
3261    CameraMetadata meta;
3262    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3263    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3264    bool isVidBufRequested = false;
3265    camera3_stream_buffer_t *pInputBuffer = NULL;
3266
3267    pthread_mutex_lock(&mMutex);
3268
3269    // Validate current state
3270    switch (mState) {
3271        case CONFIGURED:
3272        case STARTED:
3273            /* valid state */
3274            break;
3275
3276        case ERROR:
3277            pthread_mutex_unlock(&mMutex);
3278            handleCameraDeviceError();
3279            return -ENODEV;
3280
3281        default:
3282            LOGE("Invalid state %d", mState);
3283            pthread_mutex_unlock(&mMutex);
3284            return -ENODEV;
3285    }
3286
3287    rc = validateCaptureRequest(request);
3288    if (rc != NO_ERROR) {
3289        LOGE("incoming request is not valid");
3290        pthread_mutex_unlock(&mMutex);
3291        return rc;
3292    }
3293
3294    meta = request->settings;
3295
3296    // For first capture request, send capture intent, and
3297    // stream on all streams
3298    if (mState == CONFIGURED) {
3299        // send an unconfigure to the backend so that the isp
3300        // resources are deallocated
3301        if (!mFirstConfiguration) {
3302            cam_stream_size_info_t stream_config_info;
3303            int32_t hal_version = CAM_HAL_V3;
3304            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3305            stream_config_info.buffer_info.min_buffers =
3306                    MIN_INFLIGHT_REQUESTS;
3307            stream_config_info.buffer_info.max_buffers =
3308                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3309            clear_metadata_buffer(mParameters);
3310            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3311                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3312            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3313                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3314            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3315                    mParameters);
3316            if (rc < 0) {
3317                LOGE("set_parms for unconfigure failed");
3318                pthread_mutex_unlock(&mMutex);
3319                return rc;
3320            }
3321        }
3322        m_perfLock.lock_acq();
3323        /* get eis information for stream configuration */
3324        cam_is_type_t is_type;
3325        char is_type_value[PROPERTY_VALUE_MAX];
3326        property_get("persist.camera.is_type", is_type_value, "0");
3327        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3328
3329        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3330            int32_t hal_version = CAM_HAL_V3;
3331            uint8_t captureIntent =
3332                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3333            mCaptureIntent = captureIntent;
3334            clear_metadata_buffer(mParameters);
3335            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3336            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3337        }
3338
3339        //If EIS is enabled, turn it on for video
3340        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3341        int32_t vsMode;
3342        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3343        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3344            rc = BAD_VALUE;
3345        }
3346
3347        //IS type will be 0 unless EIS is supported. If EIS is supported
3348        //it could either be 1 or 4 depending on the stream and video size
3349        if (setEis) {
3350            if (!m_bEisSupportedSize) {
3351                is_type = IS_TYPE_DIS;
3352            } else {
3353                is_type = IS_TYPE_EIS_2_0;
3354            }
3355            mStreamConfigInfo.is_type = is_type;
3356        } else {
3357            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3358        }
3359
3360        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3361                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3362        int32_t tintless_value = 1;
3363        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3364                CAM_INTF_PARM_TINTLESS, tintless_value);
3365        //Disable CDS for HFR mode or if DIS/EIS is on.
3366        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3367        //after every configure_stream
3368        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3369                (m_bIsVideo)) {
3370            int32_t cds = CAM_CDS_MODE_OFF;
3371            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3372                    CAM_INTF_PARM_CDS_MODE, cds))
3373                LOGE("Failed to disable CDS for HFR mode");
3374
3375        }
3376        setMobicat();
3377
3378        /* Set fps and hfr mode while sending meta stream info so that sensor
3379         * can configure appropriate streaming mode */
3380        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3381        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3382            rc = setHalFpsRange(meta, mParameters);
3383            if (rc != NO_ERROR) {
3384                LOGE("setHalFpsRange failed");
3385            }
3386        }
3387        if (meta.exists(ANDROID_CONTROL_MODE)) {
3388            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3389            rc = extractSceneMode(meta, metaMode, mParameters);
3390            if (rc != NO_ERROR) {
3391                LOGE("extractSceneMode failed");
3392            }
3393        }
3394
3395        //TODO: validate the arguments, HSV scenemode should have only the
3396        //advertised fps ranges
3397
3398        /*set the capture intent, hal version, tintless, stream info,
3399         *and disenable parameters to the backend*/
3400        LOGD("set_parms META_STREAM_INFO " );
3401        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3402            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3403                    "Format:%d",
3404                    mStreamConfigInfo.type[i],
3405                    mStreamConfigInfo.stream_sizes[i].width,
3406                    mStreamConfigInfo.stream_sizes[i].height,
3407                    mStreamConfigInfo.postprocess_mask[i],
3408                    mStreamConfigInfo.format[i]);
3409        }
3410        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3411                    mParameters);
3412        if (rc < 0) {
3413            LOGE("set_parms failed for hal version, stream info");
3414        }
3415
3416        cam_dimension_t sensor_dim;
3417        memset(&sensor_dim, 0, sizeof(sensor_dim));
3418        rc = getSensorOutputSize(sensor_dim);
3419        if (rc != NO_ERROR) {
3420            LOGE("Failed to get sensor output size");
3421            pthread_mutex_unlock(&mMutex);
3422            goto error_exit;
3423        }
3424
3425        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3426                gCamCapability[mCameraId]->active_array_size.height,
3427                sensor_dim.width, sensor_dim.height);
3428
3429        /* Set batchmode before initializing channel. Since registerBuffer
3430         * internally initializes some of the channels, better set batchmode
3431         * even before first register buffer */
3432        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3433            it != mStreamInfo.end(); it++) {
3434            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3435            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3436                    && mBatchSize) {
3437                rc = channel->setBatchSize(mBatchSize);
3438                //Disable per frame map unmap for HFR/batchmode case
3439                rc |= channel->setPerFrameMapUnmap(false);
3440                if (NO_ERROR != rc) {
3441                    LOGE("Channel init failed %d", rc);
3442                    pthread_mutex_unlock(&mMutex);
3443                    goto error_exit;
3444                }
3445            }
3446        }
3447
3448        //First initialize all streams
3449        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3450            it != mStreamInfo.end(); it++) {
3451            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3452            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3453               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3454               setEis)
3455                rc = channel->initialize(is_type);
3456            else {
3457                rc = channel->initialize(IS_TYPE_NONE);
3458            }
3459            if (NO_ERROR != rc) {
3460                LOGE("Channel initialization failed %d", rc);
3461                pthread_mutex_unlock(&mMutex);
3462                goto error_exit;
3463            }
3464        }
3465
3466        if (mRawDumpChannel) {
3467            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3468            if (rc != NO_ERROR) {
3469                LOGE("Error: Raw Dump Channel init failed");
3470                pthread_mutex_unlock(&mMutex);
3471                goto error_exit;
3472            }
3473        }
3474        if (mSupportChannel) {
3475            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3476            if (rc < 0) {
3477                LOGE("Support channel initialization failed");
3478                pthread_mutex_unlock(&mMutex);
3479                goto error_exit;
3480            }
3481        }
3482        if (mAnalysisChannel) {
3483            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3484            if (rc < 0) {
3485                LOGE("Analysis channel initialization failed");
3486                pthread_mutex_unlock(&mMutex);
3487                goto error_exit;
3488            }
3489        }
3490        if (mDummyBatchChannel) {
3491            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3492            if (rc < 0) {
3493                LOGE("mDummyBatchChannel setBatchSize failed");
3494                pthread_mutex_unlock(&mMutex);
3495                goto error_exit;
3496            }
3497            rc = mDummyBatchChannel->initialize(is_type);
3498            if (rc < 0) {
3499                LOGE("mDummyBatchChannel initialization failed");
3500                pthread_mutex_unlock(&mMutex);
3501                goto error_exit;
3502            }
3503        }
3504
3505        // Set bundle info
3506        rc = setBundleInfo();
3507        if (rc < 0) {
3508            LOGE("setBundleInfo failed %d", rc);
3509            pthread_mutex_unlock(&mMutex);
3510            goto error_exit;
3511        }
3512
3513        //Then start them.
3514        LOGH("Start META Channel");
3515        rc = mMetadataChannel->start();
3516        if (rc < 0) {
3517            LOGE("META channel start failed");
3518            pthread_mutex_unlock(&mMutex);
3519            goto error_exit;
3520        }
3521
3522        if (mAnalysisChannel) {
3523            rc = mAnalysisChannel->start();
3524            if (rc < 0) {
3525                LOGE("Analysis channel start failed");
3526                mMetadataChannel->stop();
3527                pthread_mutex_unlock(&mMutex);
3528                goto error_exit;
3529            }
3530        }
3531
3532        if (mSupportChannel) {
3533            rc = mSupportChannel->start();
3534            if (rc < 0) {
3535                LOGE("Support channel start failed");
3536                mMetadataChannel->stop();
3537                /* Although support and analysis are mutually exclusive today
3538                   adding it in anycase for future proofing */
3539                if (mAnalysisChannel) {
3540                    mAnalysisChannel->stop();
3541                }
3542                pthread_mutex_unlock(&mMutex);
3543                goto error_exit;
3544            }
3545        }
3546        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3547            it != mStreamInfo.end(); it++) {
3548            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3549            LOGH("Start Processing Channel mask=%d",
3550                     channel->getStreamTypeMask());
3551            rc = channel->start();
3552            if (rc < 0) {
3553                LOGE("channel start failed");
3554                pthread_mutex_unlock(&mMutex);
3555                goto error_exit;
3556            }
3557        }
3558
3559        if (mRawDumpChannel) {
3560            LOGD("Starting raw dump stream");
3561            rc = mRawDumpChannel->start();
3562            if (rc != NO_ERROR) {
3563                LOGE("Error Starting Raw Dump Channel");
3564                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3565                      it != mStreamInfo.end(); it++) {
3566                    QCamera3Channel *channel =
3567                        (QCamera3Channel *)(*it)->stream->priv;
3568                    LOGH("Stopping Processing Channel mask=%d",
3569                        channel->getStreamTypeMask());
3570                    channel->stop();
3571                }
3572                if (mSupportChannel)
3573                    mSupportChannel->stop();
3574                if (mAnalysisChannel) {
3575                    mAnalysisChannel->stop();
3576                }
3577                mMetadataChannel->stop();
3578                pthread_mutex_unlock(&mMutex);
3579                goto error_exit;
3580            }
3581        }
3582
3583        if (mChannelHandle) {
3584
3585            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3586                    mChannelHandle);
3587            if (rc != NO_ERROR) {
3588                LOGE("start_channel failed %d", rc);
3589                pthread_mutex_unlock(&mMutex);
3590                goto error_exit;
3591            }
3592        }
3593
3594
3595        goto no_error;
3596error_exit:
3597        m_perfLock.lock_rel();
3598        return rc;
3599no_error:
3600        m_perfLock.lock_rel();
3601
3602        mWokenUpByDaemon = false;
3603        mPendingLiveRequest = 0;
3604        mFirstConfiguration = false;
3605        enablePowerHint();
3606    }
3607
3608    uint32_t frameNumber = request->frame_number;
3609    cam_stream_ID_t streamID;
3610
3611    if (mFlushPerf) {
3612        //we cannot accept any requests during flush
3613        LOGE("process_capture_request cannot proceed during flush");
3614        pthread_mutex_unlock(&mMutex);
3615        return NO_ERROR; //should return an error
3616    }
3617
3618    if (meta.exists(ANDROID_REQUEST_ID)) {
3619        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3620        mCurrentRequestId = request_id;
3621        LOGD("Received request with id: %d", request_id);
3622    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3623        LOGE("Unable to find request id field, \
3624                & no previous id available");
3625        pthread_mutex_unlock(&mMutex);
3626        return NAME_NOT_FOUND;
3627    } else {
3628        LOGD("Re-using old request id");
3629        request_id = mCurrentRequestId;
3630    }
3631
3632    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3633                                    request->num_output_buffers,
3634                                    request->input_buffer,
3635                                    frameNumber);
3636    // Acquire all request buffers first
3637    streamID.num_streams = 0;
3638    int blob_request = 0;
3639    uint32_t snapshotStreamId = 0;
3640    for (size_t i = 0; i < request->num_output_buffers; i++) {
3641        const camera3_stream_buffer_t& output = request->output_buffers[i];
3642        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3643
3644        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3645            //Call function to store local copy of jpeg data for encode params.
3646            blob_request = 1;
3647            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3648        }
3649
3650        if (output.acquire_fence != -1) {
3651           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3652           close(output.acquire_fence);
3653           if (rc != OK) {
3654              LOGE("sync wait failed %d", rc);
3655              pthread_mutex_unlock(&mMutex);
3656              return rc;
3657           }
3658        }
3659
3660        streamID.streamID[streamID.num_streams] =
3661            channel->getStreamID(channel->getStreamTypeMask());
3662        streamID.num_streams++;
3663
3664        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3665            isVidBufRequested = true;
3666        }
3667    }
3668
3669    if (blob_request) {
3670        KPI_ATRACE_INT("SNAPSHOT", 1);
3671    }
3672    if (blob_request && mRawDumpChannel) {
3673        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3674        streamID.streamID[streamID.num_streams] =
3675            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3676        streamID.num_streams++;
3677    }
3678
3679    if(request->input_buffer == NULL) {
3680        /* Parse the settings:
3681         * - For every request in NORMAL MODE
3682         * - For every request in HFR mode during preview only case
3683         * - For first request of every batch in HFR mode during video
3684         * recording. In batchmode the same settings except frame number is
3685         * repeated in each request of the batch.
3686         */
3687        if (!mBatchSize ||
3688           (mBatchSize && !isVidBufRequested) ||
3689           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3690            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3691            if (rc < 0) {
3692                LOGE("fail to set frame parameters");
3693                pthread_mutex_unlock(&mMutex);
3694                return rc;
3695            }
3696        }
3697        /* For batchMode HFR, setFrameParameters is not called for every
3698         * request. But only frame number of the latest request is parsed.
3699         * Keep track of first and last frame numbers in a batch so that
3700         * metadata for the frame numbers of batch can be duplicated in
3701         * handleBatchMetadta */
3702        if (mBatchSize) {
3703            if (!mToBeQueuedVidBufs) {
3704                //start of the batch
3705                mFirstFrameNumberInBatch = request->frame_number;
3706            }
3707            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3708                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3709                LOGE("Failed to set the frame number in the parameters");
3710                return BAD_VALUE;
3711            }
3712        }
3713        if (mNeedSensorRestart) {
3714            /* Unlock the mutex as restartSensor waits on the channels to be
3715             * stopped, which in turn calls stream callback functions -
3716             * handleBufferWithLock and handleMetadataWithLock */
3717            pthread_mutex_unlock(&mMutex);
3718            rc = dynamicUpdateMetaStreamInfo();
3719            if (rc != NO_ERROR) {
3720                LOGE("Restarting the sensor failed");
3721                return BAD_VALUE;
3722            }
3723            mNeedSensorRestart = false;
3724            pthread_mutex_lock(&mMutex);
3725        }
3726    } else {
3727
3728        if (request->input_buffer->acquire_fence != -1) {
3729           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3730           close(request->input_buffer->acquire_fence);
3731           if (rc != OK) {
3732              LOGE("input buffer sync wait failed %d", rc);
3733              pthread_mutex_unlock(&mMutex);
3734              return rc;
3735           }
3736        }
3737    }
3738
3739    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3740        mLastCustIntentFrmNum = frameNumber;
3741    }
3742    /* Update pending request list and pending buffers map */
3743    PendingRequestInfo pendingRequest;
3744    pendingRequestIterator latestRequest;
3745    pendingRequest.frame_number = frameNumber;
3746    pendingRequest.num_buffers = request->num_output_buffers;
3747    pendingRequest.request_id = request_id;
3748    pendingRequest.blob_request = blob_request;
3749    pendingRequest.timestamp = 0;
3750    pendingRequest.bUrgentReceived = 0;
3751    if (request->input_buffer) {
3752        pendingRequest.input_buffer =
3753                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3754        *(pendingRequest.input_buffer) = *(request->input_buffer);
3755        pInputBuffer = pendingRequest.input_buffer;
3756    } else {
3757       pendingRequest.input_buffer = NULL;
3758       pInputBuffer = NULL;
3759    }
3760
3761    pendingRequest.pipeline_depth = 0;
3762    pendingRequest.partial_result_cnt = 0;
3763    extractJpegMetadata(mCurJpegMeta, request);
3764    pendingRequest.jpegMetadata = mCurJpegMeta;
3765    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3766    pendingRequest.shutter_notified = false;
3767
3768    //extract capture intent
3769    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3770        mCaptureIntent =
3771                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3772    }
3773    pendingRequest.capture_intent = mCaptureIntent;
3774
3775    //extract CAC info
3776    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
3777        mCacMode =
3778                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
3779    }
3780    pendingRequest.fwkCacMode = mCacMode;
3781
3782    for (size_t i = 0; i < request->num_output_buffers; i++) {
3783        RequestedBufferInfo requestedBuf;
3784        memset(&requestedBuf, 0, sizeof(requestedBuf));
3785        requestedBuf.stream = request->output_buffers[i].stream;
3786        requestedBuf.buffer = NULL;
3787        pendingRequest.buffers.push_back(requestedBuf);
3788
3789        // Add to buffer handle the pending buffers list
3790        PendingBufferInfo bufferInfo;
3791        bufferInfo.frame_number = frameNumber;
3792        bufferInfo.buffer = request->output_buffers[i].buffer;
3793        bufferInfo.stream = request->output_buffers[i].stream;
3794        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3795        mPendingBuffersMap.num_buffers++;
3796        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3797        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3798                 frameNumber, bufferInfo.buffer,
3799                channel->getStreamTypeMask(), bufferInfo.stream->format);
3800    }
3801    LOGD("mPendingBuffersMap.num_buffers = %d", mPendingBuffersMap.num_buffers);
3802    latestRequest = mPendingRequestsList.insert(
3803            mPendingRequestsList.end(), pendingRequest);
3804    if(mFlush) {
3805        pthread_mutex_unlock(&mMutex);
3806        return NO_ERROR;
3807    }
3808
3809    // Notify metadata channel we receive a request
3810    mMetadataChannel->request(NULL, frameNumber);
3811
3812    if(request->input_buffer != NULL){
3813        LOGD("Input request, frame_number %d", frameNumber);
3814        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3815        if (NO_ERROR != rc) {
3816            LOGE("fail to set reproc parameters");
3817            pthread_mutex_unlock(&mMutex);
3818            return rc;
3819        }
3820    }
3821
3822    // Call request on other streams
3823    uint32_t streams_need_metadata = 0;
3824    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3825    for (size_t i = 0; i < request->num_output_buffers; i++) {
3826        const camera3_stream_buffer_t& output = request->output_buffers[i];
3827        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3828
3829        if (channel == NULL) {
3830            LOGW("invalid channel pointer for stream");
3831            continue;
3832        }
3833
3834        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3835            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
3836                      output.buffer, request->input_buffer, frameNumber);
3837            if(request->input_buffer != NULL){
3838                rc = channel->request(output.buffer, frameNumber,
3839                        pInputBuffer, &mReprocMeta);
3840                if (rc < 0) {
3841                    LOGE("Fail to request on picture channel");
3842                    pthread_mutex_unlock(&mMutex);
3843                    return rc;
3844                }
3845            } else {
3846                LOGD("snapshot request with buffer %p, frame_number %d",
3847                         output.buffer, frameNumber);
3848                if (!request->settings) {
3849                    rc = channel->request(output.buffer, frameNumber,
3850                            NULL, mPrevParameters);
3851                } else {
3852                    rc = channel->request(output.buffer, frameNumber,
3853                            NULL, mParameters);
3854                }
3855                if (rc < 0) {
3856                    LOGE("Fail to request on picture channel");
3857                    pthread_mutex_unlock(&mMutex);
3858                    return rc;
3859                }
3860                pendingBufferIter->need_metadata = true;
3861                streams_need_metadata++;
3862            }
3863        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3864            bool needMetadata = false;
3865
3866            if (m_perfLock.isPerfLockTimedAcquired()) {
3867                if (m_perfLock.isTimerReset())
3868                {
3869                    m_perfLock.lock_rel_timed();
3870                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3871                }
3872            } else {
3873                m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3874            }
3875
3876            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3877            rc = yuvChannel->request(output.buffer, frameNumber,
3878                    pInputBuffer,
3879                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3880            if (rc < 0) {
3881                LOGE("Fail to request on YUV channel");
3882                pthread_mutex_unlock(&mMutex);
3883                return rc;
3884            }
3885            pendingBufferIter->need_metadata = needMetadata;
3886            if (needMetadata)
3887                streams_need_metadata += 1;
3888            LOGD("calling YUV channel request, need_metadata is %d",
3889                     needMetadata);
3890        } else {
3891            LOGD("request with buffer %p, frame_number %d",
3892                  output.buffer, frameNumber);
3893            /* Set perf lock for API-2 zsl */
3894            if (IS_USAGE_ZSL(output.stream->usage)) {
3895                if (m_perfLock.isPerfLockTimedAcquired()) {
3896                    if (m_perfLock.isTimerReset())
3897                    {
3898                        m_perfLock.lock_rel_timed();
3899                        m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3900                    }
3901                } else {
3902                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3903                }
3904            }
3905
3906            rc = channel->request(output.buffer, frameNumber);
3907            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3908                    && mBatchSize) {
3909                mToBeQueuedVidBufs++;
3910                if (mToBeQueuedVidBufs == mBatchSize) {
3911                    channel->queueBatchBuf();
3912                }
3913            }
3914            if (rc < 0) {
3915                LOGE("request failed");
3916                pthread_mutex_unlock(&mMutex);
3917                return rc;
3918            }
3919        }
3920        pendingBufferIter++;
3921    }
3922
3923    //If 2 streams have need_metadata set to true, fail the request, unless
3924    //we copy/reference count the metadata buffer
3925    if (streams_need_metadata > 1) {
3926        LOGE("not supporting request in which two streams requires"
3927                " 2 HAL metadata for reprocessing");
3928        pthread_mutex_unlock(&mMutex);
3929        return -EINVAL;
3930    }
3931
3932    if(request->input_buffer == NULL) {
3933        /* Set the parameters to backend:
3934         * - For every request in NORMAL MODE
3935         * - For every request in HFR mode during preview only case
3936         * - Once every batch in HFR mode during video recording
3937         */
3938        if (!mBatchSize ||
3939           (mBatchSize && !isVidBufRequested) ||
3940           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3941            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3942                     mBatchSize, isVidBufRequested,
3943                    mToBeQueuedVidBufs);
3944            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3945                    mParameters);
3946            if (rc < 0) {
3947                LOGE("set_parms failed");
3948            }
3949            /* reset to zero coz, the batch is queued */
3950            mToBeQueuedVidBufs = 0;
3951            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3952        }
3953        mPendingLiveRequest++;
3954    }
3955
3956    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3957
3958    mState = STARTED;
3959    // Added a timed condition wait
3960    struct timespec ts;
3961    uint8_t isValidTimeout = 1;
3962    rc = clock_gettime(CLOCK_REALTIME, &ts);
3963    if (rc < 0) {
3964      isValidTimeout = 0;
3965      LOGE("Error reading the real time clock!!");
3966    }
3967    else {
3968      // Make timeout as 5 sec for request to be honored
3969      ts.tv_sec += 5;
3970    }
3971    //Block on conditional variable
3972    if (mBatchSize) {
3973        /* For HFR, more buffers are dequeued upfront to improve the performance */
3974        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3975        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3976    }
3977    if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
3978        m_perfLock.lock_rel_timed();
3979
3980    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
3981            (mState != ERROR) && (mState != DEINIT)) {
3982        if (!isValidTimeout) {
3983            LOGD("Blocking on conditional wait");
3984            pthread_cond_wait(&mRequestCond, &mMutex);
3985        }
3986        else {
3987            LOGD("Blocking on timed conditional wait");
3988            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3989            if (rc == ETIMEDOUT) {
3990                rc = -ENODEV;
3991                LOGE("Unblocked on timeout!!!!");
3992                break;
3993            }
3994        }
3995        LOGD("Unblocked");
3996        if (mWokenUpByDaemon) {
3997            mWokenUpByDaemon = false;
3998            if (mPendingLiveRequest < maxInFlightRequests)
3999                break;
4000        }
4001    }
4002    pthread_mutex_unlock(&mMutex);
4003
4004    return rc;
4005}
4006
4007/*===========================================================================
4008 * FUNCTION   : dump
4009 *
4010 * DESCRIPTION:
4011 *
4012 * PARAMETERS :
4013 *
4014 *
4015 * RETURN     :
4016 *==========================================================================*/
4017void QCamera3HardwareInterface::dump(int fd)
4018{
4019    pthread_mutex_lock(&mMutex);
4020    dprintf(fd, "\n Camera HAL3 information Begin \n");
4021
4022    dprintf(fd, "\nNumber of pending requests: %zu \n",
4023        mPendingRequestsList.size());
4024    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4025    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4026    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4027    for(pendingRequestIterator i = mPendingRequestsList.begin();
4028            i != mPendingRequestsList.end(); i++) {
4029        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4030        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4031        i->input_buffer);
4032    }
4033    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4034                mPendingBuffersMap.num_buffers);
4035    dprintf(fd, "-------+------------------\n");
4036    dprintf(fd, " Frame | Stream type mask \n");
4037    dprintf(fd, "-------+------------------\n");
4038    for(List<PendingBufferInfo>::iterator i =
4039        mPendingBuffersMap.mPendingBufferList.begin();
4040        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
4041        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
4042        dprintf(fd, " %5d | %11d \n",
4043                i->frame_number, channel->getStreamTypeMask());
4044    }
4045    dprintf(fd, "-------+------------------\n");
4046
4047    dprintf(fd, "\nPending frame drop list: %zu\n",
4048        mPendingFrameDropList.size());
4049    dprintf(fd, "-------+-----------\n");
4050    dprintf(fd, " Frame | Stream ID \n");
4051    dprintf(fd, "-------+-----------\n");
4052    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4053        i != mPendingFrameDropList.end(); i++) {
4054        dprintf(fd, " %5d | %9d \n",
4055            i->frame_number, i->stream_ID);
4056    }
4057    dprintf(fd, "-------+-----------\n");
4058
4059    dprintf(fd, "\n Camera HAL3 information End \n");
4060
4061    /* use dumpsys media.camera as trigger to send update debug level event */
4062    mUpdateDebugLevel = true;
4063    pthread_mutex_unlock(&mMutex);
4064    return;
4065}
4066
4067/*===========================================================================
4068 * FUNCTION   : flush
4069 *
4070 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4071 *              conditionally restarts channels
4072 *
4073 * PARAMETERS :
4074 *  @ restartChannels: re-start all channels
4075 *
4076 *
4077 * RETURN     :
4078 *          0 on success
4079 *          Error code on failure
4080 *==========================================================================*/
4081int QCamera3HardwareInterface::flush(bool restartChannels)
4082{
4083    KPI_ATRACE_CALL();
4084    int32_t rc = NO_ERROR;
4085
4086    LOGD("Unblocking Process Capture Request");
4087    pthread_mutex_lock(&mMutex);
4088    mFlush = true;
4089    pthread_mutex_unlock(&mMutex);
4090
4091    rc = stopAllChannels();
4092    if (rc < 0) {
4093        LOGE("stopAllChannels failed");
4094        return rc;
4095    }
4096    if (mChannelHandle) {
4097        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4098                mChannelHandle);
4099    }
4100
4101    // Reset bundle info
4102    rc = setBundleInfo();
4103    if (rc < 0) {
4104        LOGE("setBundleInfo failed %d", rc);
4105        return rc;
4106    }
4107
4108    // Mutex Lock
4109    pthread_mutex_lock(&mMutex);
4110
4111    // Unblock process_capture_request
4112    mPendingLiveRequest = 0;
4113    pthread_cond_signal(&mRequestCond);
4114
4115    rc = notifyErrorForPendingRequests();
4116    if (rc < 0) {
4117        LOGE("notifyErrorForPendingRequests failed");
4118        pthread_mutex_unlock(&mMutex);
4119        return rc;
4120    }
4121
4122    mFlush = false;
4123
4124    // Start the Streams/Channels
4125    if (restartChannels) {
4126        rc = startAllChannels();
4127        if (rc < 0) {
4128            LOGE("startAllChannels failed");
4129            pthread_mutex_unlock(&mMutex);
4130            return rc;
4131        }
4132    }
4133
4134    if (mChannelHandle) {
4135        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4136                    mChannelHandle);
4137        if (rc < 0) {
4138            LOGE("start_channel failed");
4139            pthread_mutex_unlock(&mMutex);
4140            return rc;
4141        }
4142    }
4143
4144    pthread_mutex_unlock(&mMutex);
4145
4146    return 0;
4147}
4148
4149/*===========================================================================
4150 * FUNCTION   : flushPerf
4151 *
4152 * DESCRIPTION: This is the performance optimization version of flush that does
4153 *              not use stream off, rather flushes the system
4154 *
4155 * PARAMETERS :
4156 *
4157 *
4158 * RETURN     : 0 : success
4159 *              -EINVAL: input is malformed (device is not valid)
4160 *              -ENODEV: if the device has encountered a serious error
4161 *==========================================================================*/
4162int QCamera3HardwareInterface::flushPerf()
4163{
4164    ATRACE_CALL();
4165    int32_t rc = 0;
4166    struct timespec timeout;
4167    bool timed_wait = false;
4168    FlushMap flushMap;
4169
4170    pthread_mutex_lock(&mMutex);
4171    mFlushPerf = true;
4172
4173    /* send the flush event to the backend */
4174    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4175    if (rc < 0) {
4176        LOGE("Error in flush: IOCTL failure");
4177        mFlushPerf = false;
4178        pthread_mutex_unlock(&mMutex);
4179        return -ENODEV;
4180    }
4181
4182    if (mPendingBuffersMap.num_buffers == 0) {
4183        LOGD("No pending buffers in the HAL, return flush");
4184        mFlushPerf = false;
4185        pthread_mutex_unlock(&mMutex);
4186        return rc;
4187    }
4188
4189    /* wait on a signal that buffers were received */
4190    rc = clock_gettime(CLOCK_REALTIME, &timeout);
4191    if (rc < 0) {
4192        LOGE("Error reading the real time clock, cannot use timed wait");
4193    } else {
4194        timeout.tv_sec += FLUSH_TIMEOUT;
4195        timed_wait = true;
4196    }
4197
4198    //Block on conditional variable
4199    while (mPendingBuffersMap.num_buffers != 0) {
4200        LOGD("Waiting on mBuffersCond");
4201        if (!timed_wait) {
4202            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4203            if (rc != 0) {
4204                 LOGE("pthread_cond_wait failed due to rc = %s",
4205                        strerror(rc));
4206                 break;
4207            }
4208        } else {
4209            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4210            if (rc != 0) {
4211                LOGE("pthread_cond_timedwait failed due to rc = %s",
4212                            strerror(rc));
4213                break;
4214            }
4215        }
4216    }
4217    if (rc != 0) {
4218        mFlushPerf = false;
4219        pthread_mutex_unlock(&mMutex);
4220        return -ENODEV;
4221    }
4222
4223    LOGD("Received buffers, now safe to return them");
4224
4225    //make sure the channels handle flush
4226    //currently only required for the picture channel to release snapshot resources
4227    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4228            it != mStreamInfo.end(); it++) {
4229        QCamera3Channel *channel = (*it)->channel;
4230        if (channel) {
4231            rc = channel->flush();
4232            if (rc) {
4233               LOGE("Flushing the channels failed with error %d", rc);
4234               // even though the channel flush failed we need to continue and
4235               // return the buffers we have to the framework, however the return
4236               // value will be an error
4237               rc = -ENODEV;
4238            }
4239        }
4240    }
4241
4242    /* notify the frameworks and send errored results */
4243    rc = notifyErrorForPendingRequests();
4244    if (rc < 0) {
4245        LOGE("notifyErrorForPendingRequests failed");
4246        pthread_mutex_unlock(&mMutex);
4247        return rc;
4248    }
4249
4250    //unblock process_capture_request
4251    mPendingLiveRequest = 0;
4252    unblockRequestIfNecessary();
4253
4254    mFlushPerf = false;
4255    pthread_mutex_unlock(&mMutex);
4256    return rc;
4257}
4258
4259/*===========================================================================
4260 * FUNCTION   : handleCameraDeviceError
4261 *
4262 * DESCRIPTION: This function calls internal flush and notifies the error to
4263 *              framework and updates the state variable.
4264 *
4265 * PARAMETERS : None
4266 *
4267 * RETURN     : NO_ERROR on Success
4268 *              Error code on failure
4269 *==========================================================================*/
4270int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4271{
4272    int32_t rc = NO_ERROR;
4273
4274    pthread_mutex_lock(&mMutex);
4275    if (mState != ERROR) {
4276        //if mState != ERROR, nothing to be done
4277        pthread_mutex_unlock(&mMutex);
4278        return NO_ERROR;
4279    }
4280    pthread_mutex_unlock(&mMutex);
4281
4282    rc = flush(false /* restart channels */);
4283    if (NO_ERROR != rc) {
4284        LOGE("internal flush to handle mState = ERROR failed");
4285    }
4286
4287    pthread_mutex_lock(&mMutex);
4288    mState = DEINIT;
4289    pthread_mutex_unlock(&mMutex);
4290
4291    camera3_notify_msg_t notify_msg;
4292    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4293    notify_msg.type = CAMERA3_MSG_ERROR;
4294    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4295    notify_msg.message.error.error_stream = NULL;
4296    notify_msg.message.error.frame_number = 0;
4297    mCallbackOps->notify(mCallbackOps, &notify_msg);
4298
4299    return rc;
4300}
4301
4302/*===========================================================================
4303 * FUNCTION   : captureResultCb
4304 *
4305 * DESCRIPTION: Callback handler for all capture result
4306 *              (streams, as well as metadata)
4307 *
4308 * PARAMETERS :
4309 *   @metadata : metadata information
4310 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4311 *               NULL if metadata.
4312 *
4313 * RETURN     : NONE
4314 *==========================================================================*/
4315void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4316                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4317{
4318    if (metadata_buf) {
4319        if (mBatchSize) {
4320            handleBatchMetadata(metadata_buf,
4321                    true /* free_and_bufdone_meta_buf */);
4322        } else { /* mBatchSize = 0 */
4323            hdrPlusPerfLock(metadata_buf);
4324            pthread_mutex_lock(&mMutex);
4325            handleMetadataWithLock(metadata_buf,
4326                    true /* free_and_bufdone_meta_buf */);
4327            pthread_mutex_unlock(&mMutex);
4328        }
4329    } else if (isInputBuffer) {
4330        pthread_mutex_lock(&mMutex);
4331        handleInputBufferWithLock(frame_number);
4332        pthread_mutex_unlock(&mMutex);
4333    } else {
4334        pthread_mutex_lock(&mMutex);
4335        handleBufferWithLock(buffer, frame_number);
4336        pthread_mutex_unlock(&mMutex);
4337    }
4338    return;
4339}
4340
4341/*===========================================================================
4342 * FUNCTION   : getReprocessibleOutputStreamId
4343 *
4344 * DESCRIPTION: Get source output stream id for the input reprocess stream
4345 *              based on size and format, which would be the largest
4346 *              output stream if an input stream exists.
4347 *
4348 * PARAMETERS :
4349 *   @id      : return the stream id if found
4350 *
4351 * RETURN     : int32_t type of status
4352 *              NO_ERROR  -- success
4353 *              none-zero failure code
4354 *==========================================================================*/
4355int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4356{
4357    /* check if any output or bidirectional stream with the same size and format
4358       and return that stream */
4359    if ((mInputStreamInfo.dim.width > 0) &&
4360            (mInputStreamInfo.dim.height > 0)) {
4361        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4362                it != mStreamInfo.end(); it++) {
4363
4364            camera3_stream_t *stream = (*it)->stream;
4365            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4366                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4367                    (stream->format == mInputStreamInfo.format)) {
4368                // Usage flag for an input stream and the source output stream
4369                // may be different.
4370                LOGD("Found reprocessible output stream! %p", *it);
4371                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4372                         stream->usage, mInputStreamInfo.usage);
4373
4374                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4375                if (channel != NULL && channel->mStreams[0]) {
4376                    id = channel->mStreams[0]->getMyServerID();
4377                    return NO_ERROR;
4378                }
4379            }
4380        }
4381    } else {
4382        LOGD("No input stream, so no reprocessible output stream");
4383    }
4384    return NAME_NOT_FOUND;
4385}
4386
4387/*===========================================================================
4388 * FUNCTION   : lookupFwkName
4389 *
4390 * DESCRIPTION: In case the enum is not same in fwk and backend
4391 *              make sure the parameter is correctly propogated
4392 *
4393 * PARAMETERS  :
4394 *   @arr      : map between the two enums
4395 *   @len      : len of the map
4396 *   @hal_name : name of the hal_parm to map
4397 *
4398 * RETURN     : int type of status
4399 *              fwk_name  -- success
4400 *              none-zero failure code
4401 *==========================================================================*/
4402template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4403        size_t len, halType hal_name)
4404{
4405
4406    for (size_t i = 0; i < len; i++) {
4407        if (arr[i].hal_name == hal_name) {
4408            return arr[i].fwk_name;
4409        }
4410    }
4411
4412    /* Not able to find matching framework type is not necessarily
4413     * an error case. This happens when mm-camera supports more attributes
4414     * than the frameworks do */
4415    LOGH("Cannot find matching framework type");
4416    return NAME_NOT_FOUND;
4417}
4418
4419/*===========================================================================
4420 * FUNCTION   : lookupHalName
4421 *
4422 * DESCRIPTION: In case the enum is not same in fwk and backend
4423 *              make sure the parameter is correctly propogated
4424 *
4425 * PARAMETERS  :
4426 *   @arr      : map between the two enums
4427 *   @len      : len of the map
4428 *   @fwk_name : name of the hal_parm to map
4429 *
4430 * RETURN     : int32_t type of status
4431 *              hal_name  -- success
4432 *              none-zero failure code
4433 *==========================================================================*/
4434template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4435        size_t len, fwkType fwk_name)
4436{
4437    for (size_t i = 0; i < len; i++) {
4438        if (arr[i].fwk_name == fwk_name) {
4439            return arr[i].hal_name;
4440        }
4441    }
4442
4443    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4444    return NAME_NOT_FOUND;
4445}
4446
4447/*===========================================================================
4448 * FUNCTION   : lookupProp
4449 *
4450 * DESCRIPTION: lookup a value by its name
4451 *
4452 * PARAMETERS :
4453 *   @arr     : map between the two enums
4454 *   @len     : size of the map
4455 *   @name    : name to be looked up
4456 *
4457 * RETURN     : Value if found
4458 *              CAM_CDS_MODE_MAX if not found
4459 *==========================================================================*/
4460template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4461        size_t len, const char *name)
4462{
4463    if (name) {
4464        for (size_t i = 0; i < len; i++) {
4465            if (!strcmp(arr[i].desc, name)) {
4466                return arr[i].val;
4467            }
4468        }
4469    }
4470    return CAM_CDS_MODE_MAX;
4471}
4472
4473/*===========================================================================
4474 *
4475 * DESCRIPTION:
4476 *
4477 * PARAMETERS :
4478 *   @metadata : metadata information from callback
4479 *   @timestamp: metadata buffer timestamp
4480 *   @request_id: request id
4481 *   @jpegMetadata: additional jpeg metadata
4482 *   @pprocDone: whether internal offline postprocsesing is done
4483 *
4484 * RETURN     : camera_metadata_t*
4485 *              metadata in a format specified by fwk
4486 *==========================================================================*/
4487camera_metadata_t*
4488QCamera3HardwareInterface::translateFromHalMetadata(
4489                                 metadata_buffer_t *metadata,
4490                                 nsecs_t timestamp,
4491                                 int32_t request_id,
4492                                 const CameraMetadata& jpegMetadata,
4493                                 uint8_t pipeline_depth,
4494                                 uint8_t capture_intent,
4495                                 bool pprocDone,
4496                                 uint8_t fwk_cacMode)
4497{
4498    CameraMetadata camMetadata;
4499    camera_metadata_t *resultMetadata;
4500
4501    if (jpegMetadata.entryCount())
4502        camMetadata.append(jpegMetadata);
4503
4504    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4505    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4506    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4507    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4508
4509    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4510        int64_t fwk_frame_number = *frame_number;
4511        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4512    }
4513
4514    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4515        int32_t fps_range[2];
4516        fps_range[0] = (int32_t)float_range->min_fps;
4517        fps_range[1] = (int32_t)float_range->max_fps;
4518        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4519                                      fps_range, 2);
4520        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4521             fps_range[0], fps_range[1]);
4522    }
4523
4524    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4525        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4526    }
4527
4528    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4529        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4530                METADATA_MAP_SIZE(SCENE_MODES_MAP),
4531                *sceneMode);
4532        if (NAME_NOT_FOUND != val) {
4533            uint8_t fwkSceneMode = (uint8_t)val;
4534            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4535            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4536                     fwkSceneMode);
4537        }
4538    }
4539
4540    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4541        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4542        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4543    }
4544
4545    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4546        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4547        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4548    }
4549
4550    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4551        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4552        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4553    }
4554
4555    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4556            CAM_INTF_META_EDGE_MODE, metadata) {
4557        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4558    }
4559
4560    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4561        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4562        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4563    }
4564
4565    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4566        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4567    }
4568
4569    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4570        if (0 <= *flashState) {
4571            uint8_t fwk_flashState = (uint8_t) *flashState;
4572            if (!gCamCapability[mCameraId]->flash_available) {
4573                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4574            }
4575            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4576        }
4577    }
4578
4579    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4580        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4581        if (NAME_NOT_FOUND != val) {
4582            uint8_t fwk_flashMode = (uint8_t)val;
4583            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4584        }
4585    }
4586
4587    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4588        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4589        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4590    }
4591
4592    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4593        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4594    }
4595
4596    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4597        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4598    }
4599
4600    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4601        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4602    }
4603
4604    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4605        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4606        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4607    }
4608
4609    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4610        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4611        LOGD("fwk_videoStab = %d", fwk_videoStab);
4612        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4613    } else {
4614        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
4615        // and so hardcoding the Video Stab result to OFF mode.
4616        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4617        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
4618        LOGD("%s: EIS result default to OFF mode", __func__);
4619    }
4620
4621    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4622        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4623        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4624    }
4625
4626    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4627        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4628    }
4629
4630    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4631        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4632
4633        LOGD("dynamicblackLevel = %f %f %f %f",
4634          blackLevelSourcePattern->cam_black_level[0],
4635          blackLevelSourcePattern->cam_black_level[1],
4636          blackLevelSourcePattern->cam_black_level[2],
4637          blackLevelSourcePattern->cam_black_level[3]);
4638    }
4639
4640    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4641        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4642        float fwk_blackLevelInd[4];
4643
4644        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4645        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4646        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4647        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4648
4649        LOGD("applied dynamicblackLevel = %f %f %f %f",
4650          blackLevelAppliedPattern->cam_black_level[0],
4651          blackLevelAppliedPattern->cam_black_level[1],
4652          blackLevelAppliedPattern->cam_black_level[2],
4653          blackLevelAppliedPattern->cam_black_level[3]);
4654        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4655
4656        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
4657        // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4658        // depth space.
4659        fwk_blackLevelInd[0] /= 64.0;
4660        fwk_blackLevelInd[1] /= 64.0;
4661        fwk_blackLevelInd[2] /= 64.0;
4662        fwk_blackLevelInd[3] /= 64.0;
4663        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4664    }
4665
4666    // Fixed whitelevel is used by ISP/Sensor
4667    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4668            &gCamCapability[mCameraId]->white_level, 1);
4669
4670    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4671            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4672        int32_t scalerCropRegion[4];
4673        scalerCropRegion[0] = hScalerCropRegion->left;
4674        scalerCropRegion[1] = hScalerCropRegion->top;
4675        scalerCropRegion[2] = hScalerCropRegion->width;
4676        scalerCropRegion[3] = hScalerCropRegion->height;
4677
4678        // Adjust crop region from sensor output coordinate system to active
4679        // array coordinate system.
4680        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4681                scalerCropRegion[2], scalerCropRegion[3]);
4682
4683        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4684    }
4685
4686    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4687        LOGD("sensorExpTime = %lld", *sensorExpTime);
4688        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4689    }
4690
4691    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4692            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4693        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
4694        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4695    }
4696
4697    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4698            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4699        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
4700        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4701                sensorRollingShutterSkew, 1);
4702    }
4703
4704    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4705        LOGD("sensorSensitivity = %d", *sensorSensitivity);
4706        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4707
4708        //calculate the noise profile based on sensitivity
4709        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4710        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4711        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4712        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4713            noise_profile[i]   = noise_profile_S;
4714            noise_profile[i+1] = noise_profile_O;
4715        }
4716        LOGD("noise model entry (S, O) is (%f, %f)",
4717                noise_profile_S, noise_profile_O);
4718        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4719                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4720    }
4721
4722    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
4723        int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
4724        camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
4725    }
4726
4727    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4728        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4729        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4730    }
4731
4732    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4733        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4734                *faceDetectMode);
4735        if (NAME_NOT_FOUND != val) {
4736            uint8_t fwk_faceDetectMode = (uint8_t)val;
4737            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4738
4739            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4740                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4741                        CAM_INTF_META_FACE_DETECTION, metadata) {
4742                    uint8_t numFaces = MIN(
4743                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4744                    int32_t faceIds[MAX_ROI];
4745                    uint8_t faceScores[MAX_ROI];
4746                    int32_t faceRectangles[MAX_ROI * 4];
4747                    int32_t faceLandmarks[MAX_ROI * 6];
4748                    size_t j = 0, k = 0;
4749
4750                    for (size_t i = 0; i < numFaces; i++) {
4751                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4752                        // Adjust crop region from sensor output coordinate system to active
4753                        // array coordinate system.
4754                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4755                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4756                                rect.width, rect.height);
4757
4758                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4759                                faceRectangles+j, -1);
4760
4761                        j+= 4;
4762                    }
4763                    if (numFaces <= 0) {
4764                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4765                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4766                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4767                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4768                    }
4769
4770                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4771                            numFaces);
4772                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4773                            faceRectangles, numFaces * 4U);
4774                    if (fwk_faceDetectMode ==
4775                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4776                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
4777                                CAM_INTF_META_FACE_LANDMARK, metadata) {
4778
4779                            for (size_t i = 0; i < numFaces; i++) {
4780                                // Map the co-ordinate sensor output coordinate system to active
4781                                // array coordinate system.
4782                                mCropRegionMapper.toActiveArray(
4783                                        landmarks->face_landmarks[i].left_eye_center.x,
4784                                        landmarks->face_landmarks[i].left_eye_center.y);
4785                                mCropRegionMapper.toActiveArray(
4786                                        landmarks->face_landmarks[i].right_eye_center.x,
4787                                        landmarks->face_landmarks[i].right_eye_center.y);
4788                                mCropRegionMapper.toActiveArray(
4789                                        landmarks->face_landmarks[i].mouth_center.x,
4790                                        landmarks->face_landmarks[i].mouth_center.y);
4791
4792                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
4793                                k+= 6;
4794                            }
4795                        }
4796
4797                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4798                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4799                                faceLandmarks, numFaces * 6U);
4800                   }
4801                }
4802            }
4803        }
4804    }
4805
4806    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4807        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4808        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4809    }
4810
4811    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4812            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4813        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4814        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4815    }
4816
4817    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4818            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4819        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4820                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4821    }
4822
4823    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4824            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4825        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4826                CAM_MAX_SHADING_MAP_HEIGHT);
4827        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4828                CAM_MAX_SHADING_MAP_WIDTH);
4829        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4830                lensShadingMap->lens_shading, 4U * map_width * map_height);
4831    }
4832
4833    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4834        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4835        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4836    }
4837
4838    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4839        //Populate CAM_INTF_META_TONEMAP_CURVES
4840        /* ch0 = G, ch 1 = B, ch 2 = R*/
4841        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4842            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4843                     tonemap->tonemap_points_cnt,
4844                    CAM_MAX_TONEMAP_CURVE_SIZE);
4845            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4846        }
4847
4848        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4849                        &tonemap->curves[0].tonemap_points[0][0],
4850                        tonemap->tonemap_points_cnt * 2);
4851
4852        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4853                        &tonemap->curves[1].tonemap_points[0][0],
4854                        tonemap->tonemap_points_cnt * 2);
4855
4856        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4857                        &tonemap->curves[2].tonemap_points[0][0],
4858                        tonemap->tonemap_points_cnt * 2);
4859    }
4860
4861    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4862            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4863        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4864                CC_GAINS_COUNT);
4865    }
4866
4867    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4868            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4869        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4870                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4871                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4872    }
4873
4874    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4875            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4876        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4877            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4878                     toneCurve->tonemap_points_cnt,
4879                    CAM_MAX_TONEMAP_CURVE_SIZE);
4880            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4881        }
4882        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4883                (float*)toneCurve->curve.tonemap_points,
4884                toneCurve->tonemap_points_cnt * 2);
4885    }
4886
4887    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4888            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4889        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4890                predColorCorrectionGains->gains, 4);
4891    }
4892
4893    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4894            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4895        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4896                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4897                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4898    }
4899
4900    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4901        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4902    }
4903
4904    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4905        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4906        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4907    }
4908
4909    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4910        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4911        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4912    }
4913
4914    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4915        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4916                *effectMode);
4917        if (NAME_NOT_FOUND != val) {
4918            uint8_t fwk_effectMode = (uint8_t)val;
4919            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4920        }
4921    }
4922
4923    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4924            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4925        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4926                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4927        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4928            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4929        }
4930        int32_t fwk_testPatternData[4];
4931        fwk_testPatternData[0] = testPatternData->r;
4932        fwk_testPatternData[3] = testPatternData->b;
4933        switch (gCamCapability[mCameraId]->color_arrangement) {
4934        case CAM_FILTER_ARRANGEMENT_RGGB:
4935        case CAM_FILTER_ARRANGEMENT_GRBG:
4936            fwk_testPatternData[1] = testPatternData->gr;
4937            fwk_testPatternData[2] = testPatternData->gb;
4938            break;
4939        case CAM_FILTER_ARRANGEMENT_GBRG:
4940        case CAM_FILTER_ARRANGEMENT_BGGR:
4941            fwk_testPatternData[2] = testPatternData->gr;
4942            fwk_testPatternData[1] = testPatternData->gb;
4943            break;
4944        default:
4945            LOGE("color arrangement %d is not supported",
4946                gCamCapability[mCameraId]->color_arrangement);
4947            break;
4948        }
4949        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4950    }
4951
4952    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4953        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4954    }
4955
4956    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4957        String8 str((const char *)gps_methods);
4958        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4959    }
4960
4961    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4962        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4963    }
4964
4965    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4966        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4967    }
4968
4969    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4970        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4971        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4972    }
4973
4974    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4975        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4976        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4977    }
4978
4979    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4980        int32_t fwk_thumb_size[2];
4981        fwk_thumb_size[0] = thumb_size->width;
4982        fwk_thumb_size[1] = thumb_size->height;
4983        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4984    }
4985
4986    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4987        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4988                privateData,
4989                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4990    }
4991
4992    if (metadata->is_tuning_params_valid) {
4993        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4994        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4995        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4996
4997
4998        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4999                sizeof(uint32_t));
5000        data += sizeof(uint32_t);
5001
5002        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5003                sizeof(uint32_t));
5004        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5005        data += sizeof(uint32_t);
5006
5007        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5008                sizeof(uint32_t));
5009        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5010        data += sizeof(uint32_t);
5011
5012        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5013                sizeof(uint32_t));
5014        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5015        data += sizeof(uint32_t);
5016
5017        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5018                sizeof(uint32_t));
5019        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5020        data += sizeof(uint32_t);
5021
5022        metadata->tuning_params.tuning_mod3_data_size = 0;
5023        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5024                sizeof(uint32_t));
5025        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5026        data += sizeof(uint32_t);
5027
5028        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5029                TUNING_SENSOR_DATA_MAX);
5030        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5031                count);
5032        data += count;
5033
5034        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5035                TUNING_VFE_DATA_MAX);
5036        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5037                count);
5038        data += count;
5039
5040        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5041                TUNING_CPP_DATA_MAX);
5042        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5043                count);
5044        data += count;
5045
5046        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5047                TUNING_CAC_DATA_MAX);
5048        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5049                count);
5050        data += count;
5051
5052        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5053                (int32_t *)(void *)tuning_meta_data_blob,
5054                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5055    }
5056
5057    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5058            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5059        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5060                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5061                NEUTRAL_COL_POINTS);
5062    }
5063
5064    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5065        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5066        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5067    }
5068
5069    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5070        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5071        // Adjust crop region from sensor output coordinate system to active
5072        // array coordinate system.
5073        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5074                hAeRegions->rect.width, hAeRegions->rect.height);
5075
5076        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5077        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5078                REGIONS_TUPLE_COUNT);
5079        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5080                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5081                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5082                hAeRegions->rect.height);
5083    }
5084
5085    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5086        uint8_t fwk_afState = (uint8_t) *afState;
5087        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5088        LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5089    }
5090
5091    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5092        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5093    }
5094
5095    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5096        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5097    }
5098
5099    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5100        uint8_t fwk_lensState = *lensState;
5101        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5102    }
5103
5104    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5105        /*af regions*/
5106        int32_t afRegions[REGIONS_TUPLE_COUNT];
5107        // Adjust crop region from sensor output coordinate system to active
5108        // array coordinate system.
5109        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5110                hAfRegions->rect.width, hAfRegions->rect.height);
5111
5112        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5113        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5114                REGIONS_TUPLE_COUNT);
5115        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5116                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5117                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5118                hAfRegions->rect.height);
5119    }
5120
5121    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5122        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5123                *hal_ab_mode);
5124        if (NAME_NOT_FOUND != val) {
5125            uint8_t fwk_ab_mode = (uint8_t)val;
5126            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5127        }
5128    }
5129
5130    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5131        int val = lookupFwkName(SCENE_MODES_MAP,
5132                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5133        if (NAME_NOT_FOUND != val) {
5134            uint8_t fwkBestshotMode = (uint8_t)val;
5135            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5136            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5137        } else {
5138            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5139        }
5140    }
5141
5142    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5143         uint8_t fwk_mode = (uint8_t) *mode;
5144         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5145    }
5146
5147    /* Constant metadata values to be update*/
5148    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5149    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5150
5151    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5152    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5153
5154    int32_t hotPixelMap[2];
5155    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5156
5157    // CDS
5158    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5159        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5160    }
5161
5162    // TNR
5163    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5164        uint8_t tnr_enable       = tnr->denoise_enable;
5165        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5166
5167        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5168        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5169    }
5170
5171    // Reprocess crop data
5172    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5173        uint8_t cnt = crop_data->num_of_streams;
5174        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5175            // mm-qcamera-daemon only posts crop_data for streams
5176            // not linked to pproc. So no valid crop metadata is not
5177            // necessarily an error case.
5178            LOGD("No valid crop metadata entries");
5179        } else {
5180            uint32_t reproc_stream_id;
5181            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5182                LOGD("No reprocessible stream found, ignore crop data");
5183            } else {
5184                int rc = NO_ERROR;
5185                Vector<int32_t> roi_map;
5186                int32_t *crop = new int32_t[cnt*4];
5187                if (NULL == crop) {
5188                   rc = NO_MEMORY;
5189                }
5190                if (NO_ERROR == rc) {
5191                    int32_t streams_found = 0;
5192                    for (size_t i = 0; i < cnt; i++) {
5193                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5194                            if (pprocDone) {
5195                                // HAL already does internal reprocessing,
5196                                // either via reprocessing before JPEG encoding,
5197                                // or offline postprocessing for pproc bypass case.
5198                                crop[0] = 0;
5199                                crop[1] = 0;
5200                                crop[2] = mInputStreamInfo.dim.width;
5201                                crop[3] = mInputStreamInfo.dim.height;
5202                            } else {
5203                                crop[0] = crop_data->crop_info[i].crop.left;
5204                                crop[1] = crop_data->crop_info[i].crop.top;
5205                                crop[2] = crop_data->crop_info[i].crop.width;
5206                                crop[3] = crop_data->crop_info[i].crop.height;
5207                            }
5208                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5209                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5210                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5211                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5212                            streams_found++;
5213                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5214                                    crop[0], crop[1], crop[2], crop[3]);
5215                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5216                                    crop_data->crop_info[i].roi_map.left,
5217                                    crop_data->crop_info[i].roi_map.top,
5218                                    crop_data->crop_info[i].roi_map.width,
5219                                    crop_data->crop_info[i].roi_map.height);
5220                            break;
5221
5222                       }
5223                    }
5224                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5225                            &streams_found, 1);
5226                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5227                            crop, (size_t)(streams_found * 4));
5228                    if (roi_map.array()) {
5229                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5230                                roi_map.array(), roi_map.size());
5231                    }
5232               }
5233               if (crop) {
5234                   delete [] crop;
5235               }
5236            }
5237        }
5238    }
5239
5240    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5241        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5242        // so hardcoding the CAC result to OFF mode.
5243        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5244        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5245    } else {
5246        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5247            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5248                    *cacMode);
5249            if (NAME_NOT_FOUND != val) {
5250                uint8_t resultCacMode = (uint8_t)val;
5251                // check whether CAC result from CB is equal to Framework set CAC mode
5252                // If not equal then set the CAC mode came in corresponding request
5253                if (fwk_cacMode != resultCacMode) {
5254                    resultCacMode = fwk_cacMode;
5255                }
5256                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5257                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5258            } else {
5259                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5260            }
5261        }
5262    }
5263
5264    // Post blob of cam_cds_data through vendor tag.
5265    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5266        uint8_t cnt = cdsInfo->num_of_streams;
5267        cam_cds_data_t cdsDataOverride;
5268        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5269        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5270        cdsDataOverride.num_of_streams = 1;
5271        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5272            uint32_t reproc_stream_id;
5273            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5274                LOGD("No reprocessible stream found, ignore cds data");
5275            } else {
5276                for (size_t i = 0; i < cnt; i++) {
5277                    if (cdsInfo->cds_info[i].stream_id ==
5278                            reproc_stream_id) {
5279                        cdsDataOverride.cds_info[0].cds_enable =
5280                                cdsInfo->cds_info[i].cds_enable;
5281                        break;
5282                    }
5283                }
5284            }
5285        } else {
5286            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5287        }
5288        camMetadata.update(QCAMERA3_CDS_INFO,
5289                (uint8_t *)&cdsDataOverride,
5290                sizeof(cam_cds_data_t));
5291    }
5292
5293    // Ldaf calibration data
5294    if (!mLdafCalibExist) {
5295        IF_META_AVAILABLE(uint32_t, ldafCalib,
5296                CAM_INTF_META_LDAF_EXIF, metadata) {
5297            mLdafCalibExist = true;
5298            mLdafCalib[0] = ldafCalib[0];
5299            mLdafCalib[1] = ldafCalib[1];
5300            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5301                    ldafCalib[0], ldafCalib[1]);
5302        }
5303    }
5304
5305    resultMetadata = camMetadata.release();
5306    return resultMetadata;
5307}
5308
5309/*===========================================================================
5310 * FUNCTION   : saveExifParams
5311 *
 * DESCRIPTION: Caches AE/AWB/AF/ASD/stats EXIF debug parameters from the
 *              metadata callback into mExifParams.debug_params for later
 *              JPEG EXIF composition.
5313 *
5314 * PARAMETERS :
5315 *   @metadata : metadata information from callback
5316 *
5317 * RETURN     : none
5318 *
5319 *==========================================================================*/
5320void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5321{
5322    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5323            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5324        if (mExifParams.debug_params) {
5325            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
5326            mExifParams.debug_params->ae_debug_params_valid = TRUE;
5327        }
5328    }
5329    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5330            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5331        if (mExifParams.debug_params) {
5332            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
5333            mExifParams.debug_params->awb_debug_params_valid = TRUE;
5334        }
5335    }
5336    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5337            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5338        if (mExifParams.debug_params) {
5339            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
5340            mExifParams.debug_params->af_debug_params_valid = TRUE;
5341        }
5342    }
5343    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5344            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5345        if (mExifParams.debug_params) {
5346            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
5347            mExifParams.debug_params->asd_debug_params_valid = TRUE;
5348        }
5349    }
5350    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5351            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5352        if (mExifParams.debug_params) {
5353            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
5354            mExifParams.debug_params->stats_debug_params_valid = TRUE;
5355        }
5356    }
5357}
5358
5359/*===========================================================================
5360 * FUNCTION   : get3AExifParams
5361 *
 * DESCRIPTION: Returns the cached 3A EXIF parameters (mExifParams) collected
 *              from metadata callbacks.
5363 *
5364 * PARAMETERS : none
5365 *
5366 *
5367 * RETURN     : mm_jpeg_exif_params_t
5368 *
5369 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return (by value) the 3A EXIF parameters most recently cached from
    // metadata callbacks (see saveExifParams); used for JPEG EXIF composition.
    return mExifParams;
}
5374
5375/*===========================================================================
5376 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5377 *
 * DESCRIPTION: Translates the urgent (partial) HAL metadata callback into
 *              framework result metadata: 3A states, triggers, AF/AWB modes
 *              and the deduced AE mode.
5379 *
5380 * PARAMETERS :
5381 *   @metadata : metadata information from callback
5382 *
5383 * RETURN     : camera_metadata_t*
5384 *              metadata in a format specified by fwk
5385 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (e.g. searching/converged/locked), forwarded as-is to the
    // framework after narrowing to the uint8_t enum the API expects.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // Echo back the AE precapture trigger and its id from the HAL metadata.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state, narrowed to the framework's uint8_t enum.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: map the HAL focus mode to the framework enum via the
    // FOCUS_MODES_MAP lookup table; skip the update if no mapping exists.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // Echo back the AF trigger and its id.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: map via WHITE_BALANCE_MODES_MAP; skip on unmapped values.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL parameters.
    // The *_MAX / -1 initializers act as "not reported" sentinels.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Priority order: redeye reduction > flash (auto/on) > plain AE on/off.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the sources reported a usable value; AE mode is omitted
        // from this result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Ownership of the metadata buffer passes to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5492
5493/*===========================================================================
5494 * FUNCTION   : dumpMetadataToFile
5495 *
5496 * DESCRIPTION: Dumps tuning metadata to file system
5497 *
5498 * PARAMETERS :
5499 *   @meta           : tuning metadata
5500 *   @dumpFrameCount : current dump frame count
5501 *   @enabled        : Enable mask
5502 *
5503 *==========================================================================*/
5504void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5505                                                   uint32_t &dumpFrameCount,
5506                                                   bool enabled,
5507                                                   const char *type,
5508                                                   uint32_t frameNumber)
5509{
5510    //Some sanity checks
5511    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5512        LOGE("Tuning sensor data size bigger than expected %d: %d",
5513              meta.tuning_sensor_data_size,
5514              TUNING_SENSOR_DATA_MAX);
5515        return;
5516    }
5517
5518    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5519        LOGE("Tuning VFE data size bigger than expected %d: %d",
5520              meta.tuning_vfe_data_size,
5521              TUNING_VFE_DATA_MAX);
5522        return;
5523    }
5524
5525    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5526        LOGE("Tuning CPP data size bigger than expected %d: %d",
5527              meta.tuning_cpp_data_size,
5528              TUNING_CPP_DATA_MAX);
5529        return;
5530    }
5531
5532    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5533        LOGE("Tuning CAC data size bigger than expected %d: %d",
5534              meta.tuning_cac_data_size,
5535              TUNING_CAC_DATA_MAX);
5536        return;
5537    }
5538    //
5539
5540    if(enabled){
5541        char timeBuf[FILENAME_MAX];
5542        char buf[FILENAME_MAX];
5543        memset(buf, 0, sizeof(buf));
5544        memset(timeBuf, 0, sizeof(timeBuf));
5545        time_t current_time;
5546        struct tm * timeinfo;
5547        time (&current_time);
5548        timeinfo = localtime (&current_time);
5549        if (timeinfo != NULL) {
5550            strftime (timeBuf, sizeof(timeBuf),
5551                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5552        }
5553        String8 filePath(timeBuf);
5554        snprintf(buf,
5555                sizeof(buf),
5556                "%dm_%s_%d.bin",
5557                dumpFrameCount,
5558                type,
5559                frameNumber);
5560        filePath.append(buf);
5561        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5562        if (file_fd >= 0) {
5563            ssize_t written_len = 0;
5564            meta.tuning_data_version = TUNING_DATA_VERSION;
5565            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5566            written_len += write(file_fd, data, sizeof(uint32_t));
5567            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5568            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5569            written_len += write(file_fd, data, sizeof(uint32_t));
5570            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5571            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5572            written_len += write(file_fd, data, sizeof(uint32_t));
5573            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5574            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5575            written_len += write(file_fd, data, sizeof(uint32_t));
5576            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5577            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5578            written_len += write(file_fd, data, sizeof(uint32_t));
5579            meta.tuning_mod3_data_size = 0;
5580            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5581            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5582            written_len += write(file_fd, data, sizeof(uint32_t));
5583            size_t total_size = meta.tuning_sensor_data_size;
5584            data = (void *)((uint8_t *)&meta.data);
5585            written_len += write(file_fd, data, total_size);
5586            total_size = meta.tuning_vfe_data_size;
5587            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5588            written_len += write(file_fd, data, total_size);
5589            total_size = meta.tuning_cpp_data_size;
5590            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5591            written_len += write(file_fd, data, total_size);
5592            total_size = meta.tuning_cac_data_size;
5593            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5594            written_len += write(file_fd, data, total_size);
5595            close(file_fd);
5596        }else {
5597            LOGE("fail to open file for metadata dumping");
5598        }
5599    }
5600}
5601
5602/*===========================================================================
5603 * FUNCTION   : cleanAndSortStreamInfo
5604 *
5605 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5606 *              and sort them such that raw stream is at the end of the list
5607 *              This is a workaround for camera daemon constraint.
5608 *
5609 * PARAMETERS : None
5610 *
5611 *==========================================================================*/
5612void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5613{
5614    List<stream_info_t *> newStreamInfo;
5615
5616    /*clean up invalid streams*/
5617    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5618            it != mStreamInfo.end();) {
5619        if(((*it)->status) == INVALID){
5620            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5621            delete channel;
5622            free(*it);
5623            it = mStreamInfo.erase(it);
5624        } else {
5625            it++;
5626        }
5627    }
5628
5629    // Move preview/video/callback/snapshot streams into newList
5630    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5631            it != mStreamInfo.end();) {
5632        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5633                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5634                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5635            newStreamInfo.push_back(*it);
5636            it = mStreamInfo.erase(it);
5637        } else
5638            it++;
5639    }
5640    // Move raw streams into newList
5641    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5642            it != mStreamInfo.end();) {
5643        newStreamInfo.push_back(*it);
5644        it = mStreamInfo.erase(it);
5645    }
5646
5647    mStreamInfo = newStreamInfo;
5648}
5649
5650/*===========================================================================
5651 * FUNCTION   : extractJpegMetadata
5652 *
5653 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5654 *              JPEG metadata is cached in HAL, and return as part of capture
5655 *              result when metadata is returned from camera daemon.
5656 *
5657 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5658 *              @request:      capture request
5659 *
5660 *==========================================================================*/
5661void QCamera3HardwareInterface::extractJpegMetadata(
5662        CameraMetadata& jpegMetadata,
5663        const camera3_capture_request_t *request)
5664{
5665    CameraMetadata frame_settings;
5666    frame_settings = request->settings;
5667
5668    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5669        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5670                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5671                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5672
5673    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5674        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5675                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5676                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5677
5678    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5679        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5680                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5681                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5682
5683    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5684        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5685                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5686                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5687
5688    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5689        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5690                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5691                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5692
5693    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5694        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5695                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5696                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5697
5698    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5699        int32_t thumbnail_size[2];
5700        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5701        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5702        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5703            int32_t orientation =
5704                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5705            if ((orientation == 90) || (orientation == 270)) {
5706               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5707               int32_t temp;
5708               temp = thumbnail_size[0];
5709               thumbnail_size[0] = thumbnail_size[1];
5710               thumbnail_size[1] = temp;
5711            }
5712         }
5713         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5714                thumbnail_size,
5715                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5716    }
5717
5718}
5719
5720/*===========================================================================
5721 * FUNCTION   : convertToRegions
5722 *
5723 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5724 *
5725 * PARAMETERS :
5726 *   @rect   : cam_rect_t struct to convert
5727 *   @region : int32_t destination array
5728 *   @weight : if we are converting from cam_area_t, weight is valid
5729 *             else weight = -1
5730 *
5731 *==========================================================================*/
5732void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5733        int32_t *region, int weight)
5734{
5735    region[0] = rect.left;
5736    region[1] = rect.top;
5737    region[2] = rect.left + rect.width;
5738    region[3] = rect.top + rect.height;
5739    if (weight > -1) {
5740        region[4] = weight;
5741    }
5742}
5743
5744/*===========================================================================
5745 * FUNCTION   : convertFromRegions
5746 *
5747 * DESCRIPTION: helper method to convert from array to cam_rect_t
5748 *
5749 * PARAMETERS :
5750 *   @rect   : cam_rect_t struct to convert
5751 *   @region : int32_t destination array
5752 *   @weight : if we are converting from cam_area_t, weight is valid
5753 *             else weight = -1
5754 *
5755 *==========================================================================*/
5756void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5757        const camera_metadata_t *settings, uint32_t tag)
5758{
5759    CameraMetadata frame_settings;
5760    frame_settings = settings;
5761    int32_t x_min = frame_settings.find(tag).data.i32[0];
5762    int32_t y_min = frame_settings.find(tag).data.i32[1];
5763    int32_t x_max = frame_settings.find(tag).data.i32[2];
5764    int32_t y_max = frame_settings.find(tag).data.i32[3];
5765    roi.weight = frame_settings.find(tag).data.i32[4];
5766    roi.rect.left = x_min;
5767    roi.rect.top = y_min;
5768    roi.rect.width = x_max - x_min;
5769    roi.rect.height = y_max - y_min;
5770}
5771
5772/*===========================================================================
5773 * FUNCTION   : resetIfNeededROI
5774 *
5775 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5776 *              crop region
5777 *
5778 * PARAMETERS :
5779 *   @roi       : cam_area_t struct to resize
5780 *   @scalerCropRegion : cam_crop_region_t region to compare against
5781 *
5782 *
5783 *==========================================================================*/
5784bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5785                                                 const cam_crop_region_t* scalerCropRegion)
5786{
5787    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5788    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5789    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5790    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5791
5792    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5793     * without having this check the calculations below to validate if the roi
5794     * is inside scalar crop region will fail resulting in the roi not being
5795     * reset causing algorithm to continue to use stale roi window
5796     */
5797    if (roi->weight == 0) {
5798        return true;
5799    }
5800
5801    if ((roi_x_max < scalerCropRegion->left) ||
5802        // right edge of roi window is left of scalar crop's left edge
5803        (roi_y_max < scalerCropRegion->top)  ||
5804        // bottom edge of roi window is above scalar crop's top edge
5805        (roi->rect.left > crop_x_max) ||
5806        // left edge of roi window is beyond(right) of scalar crop's right edge
5807        (roi->rect.top > crop_y_max)){
5808        // top edge of roi windo is above scalar crop's top edge
5809        return false;
5810    }
5811    if (roi->rect.left < scalerCropRegion->left) {
5812        roi->rect.left = scalerCropRegion->left;
5813    }
5814    if (roi->rect.top < scalerCropRegion->top) {
5815        roi->rect.top = scalerCropRegion->top;
5816    }
5817    if (roi_x_max > crop_x_max) {
5818        roi_x_max = crop_x_max;
5819    }
5820    if (roi_y_max > crop_y_max) {
5821        roi_y_max = crop_y_max;
5822    }
5823    roi->rect.width = roi_x_max - roi->rect.left;
5824    roi->rect.height = roi_y_max - roi->rect.top;
5825    return true;
5826}
5827
5828/*===========================================================================
5829 * FUNCTION   : convertLandmarks
5830 *
5831 * DESCRIPTION: helper method to extract the landmarks from face detection info
5832 *
5833 * PARAMETERS :
5834 *   @landmark_data : input landmark data to be converted
5835 *   @landmarks : int32_t destination array
5836 *
5837 *
5838 *==========================================================================*/
5839void QCamera3HardwareInterface::convertLandmarks(
5840        cam_face_landmarks_info_t landmark_data,
5841        int32_t *landmarks)
5842{
5843    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
5844    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
5845    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
5846    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
5847    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
5848    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
5849}
5850
5851#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5852/*===========================================================================
5853 * FUNCTION   : initCapabilities
5854 *
5855 * DESCRIPTION: initialize camera capabilities in static data struct
5856 *
5857 * PARAMETERS :
5858 *   @cameraId  : camera Id
5859 *
5860 * RETURN     : int32_t type of status
5861 *              NO_ERROR  -- success
5862 *              none-zero failure code
5863 *==========================================================================*/
5864int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5865{
5866    int rc = 0;
5867    mm_camera_vtbl_t *cameraHandle = NULL;
5868    QCamera3HeapMemory *capabilityHeap = NULL;
5869
5870    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5871    if (rc) {
5872        LOGE("camera_open failed. rc = %d", rc);
5873        goto open_failed;
5874    }
5875    if (!cameraHandle) {
5876        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
5877        goto open_failed;
5878    }
5879
5880    capabilityHeap = new QCamera3HeapMemory(1);
5881    if (capabilityHeap == NULL) {
5882        LOGE("creation of capabilityHeap failed");
5883        goto heap_creation_failed;
5884    }
5885    /* Allocate memory for capability buffer */
5886    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5887    if(rc != OK) {
5888        LOGE("No memory for cappability");
5889        goto allocate_failed;
5890    }
5891
5892    /* Map memory for capability buffer */
5893    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5894    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5895                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5896                                capabilityHeap->getFd(0),
5897                                sizeof(cam_capability_t));
5898    if(rc < 0) {
5899        LOGE("failed to map capability buffer");
5900        goto map_failed;
5901    }
5902
5903    /* Query Capability */
5904    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5905    if(rc < 0) {
5906        LOGE("failed to query capability");
5907        goto query_failed;
5908    }
5909    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5910    if (!gCamCapability[cameraId]) {
5911        LOGE("out of memory");
5912        goto query_failed;
5913    }
5914    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5915                                        sizeof(cam_capability_t));
5916    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_x = 0;
5917    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_y = 0;
5918    rc = 0;
5919
5920query_failed:
5921    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5922                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5923map_failed:
5924    capabilityHeap->deallocate();
5925allocate_failed:
5926    delete capabilityHeap;
5927heap_creation_failed:
5928    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5929    cameraHandle = NULL;
5930open_failed:
5931    return rc;
5932}
5933
5934/*==========================================================================
5935 * FUNCTION   : get3Aversion
5936 *
5937 * DESCRIPTION: get the Q3A S/W version
5938 *
5939 * PARAMETERS :
5940 *  @sw_version: Reference of Q3A structure which will hold version info upon
5941 *               return
5942 *
5943 * RETURN     : None
5944 *
5945 *==========================================================================*/
5946void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5947{
5948    if(gCamCapability[mCameraId])
5949        sw_version = gCamCapability[mCameraId]->q3a_version;
5950    else
5951        LOGE("Capability structure NULL!");
5952}
5953
5954
5955/*===========================================================================
5956 * FUNCTION   : initParameters
5957 *
5958 * DESCRIPTION: initialize camera parameters
5959 *
5960 * PARAMETERS :
5961 *
5962 * RETURN     : int32_t type of status
5963 *              NO_ERROR  -- success
5964 *              none-zero failure code
5965 *==========================================================================*/
5966int QCamera3HardwareInterface::initParameters()
5967{
5968    int rc = 0;
5969
5970    //Allocate Set Param Buffer
5971    mParamHeap = new QCamera3HeapMemory(1);
5972    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5973    if(rc != OK) {
5974        rc = NO_MEMORY;
5975        LOGE("Failed to allocate SETPARM Heap memory");
5976        delete mParamHeap;
5977        mParamHeap = NULL;
5978        return rc;
5979    }
5980
5981    //Map memory for parameters buffer
5982    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5983            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5984            mParamHeap->getFd(0),
5985            sizeof(metadata_buffer_t));
5986    if(rc < 0) {
5987        LOGE("failed to map SETPARM buffer");
5988        rc = FAILED_TRANSACTION;
5989        mParamHeap->deallocate();
5990        delete mParamHeap;
5991        mParamHeap = NULL;
5992        return rc;
5993    }
5994
5995    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5996
5997    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5998    return rc;
5999}
6000
/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters; tears down everything set
 *              up by initParameters() in reverse order
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the camera backend before freeing
    // the heap that backs it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Release the heap memory that backed mParameters.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's memory and is now stale.
    mParameters = NULL;

    // mPrevParameters was malloc'd in initParameters().
    free(mPrevParameters);
    mPrevParameters = NULL;
}
6024
6025/*===========================================================================
6026 * FUNCTION   : calcMaxJpegSize
6027 *
6028 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6029 *
6030 * PARAMETERS :
6031 *
6032 * RETURN     : max_jpeg_size
6033 *==========================================================================*/
6034size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6035{
6036    size_t max_jpeg_size = 0;
6037    size_t temp_width, temp_height;
6038    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6039            MAX_SIZES_CNT);
6040    for (size_t i = 0; i < count; i++) {
6041        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6042        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6043        if (temp_width * temp_height > max_jpeg_size ) {
6044            max_jpeg_size = temp_width * temp_height;
6045        }
6046    }
6047    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6048    return max_jpeg_size;
6049}
6050
6051/*===========================================================================
6052 * FUNCTION   : getMaxRawSize
6053 *
6054 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6055 *
6056 * PARAMETERS :
6057 *
6058 * RETURN     : Largest supported Raw Dimension
6059 *==========================================================================*/
6060cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6061{
6062    int max_width = 0;
6063    cam_dimension_t maxRawSize;
6064
6065    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6066    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6067        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6068            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6069            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6070        }
6071    }
6072    return maxRawSize;
6073}
6074
6075
6076/*===========================================================================
6077 * FUNCTION   : calcMaxJpegDim
6078 *
6079 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6080 *
6081 * PARAMETERS :
6082 *
6083 * RETURN     : max_jpeg_dim
6084 *==========================================================================*/
6085cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6086{
6087    cam_dimension_t max_jpeg_dim;
6088    cam_dimension_t curr_jpeg_dim;
6089    max_jpeg_dim.width = 0;
6090    max_jpeg_dim.height = 0;
6091    curr_jpeg_dim.width = 0;
6092    curr_jpeg_dim.height = 0;
6093    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6094        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6095        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6096        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6097            max_jpeg_dim.width * max_jpeg_dim.height ) {
6098            max_jpeg_dim.width = curr_jpeg_dim.width;
6099            max_jpeg_dim.height = curr_jpeg_dim.height;
6100        }
6101    }
6102    return max_jpeg_dim;
6103}
6104
6105/*===========================================================================
6106 * FUNCTION   : addStreamConfig
6107 *
6108 * DESCRIPTION: adds the stream configuration to the array
6109 *
6110 * PARAMETERS :
6111 * @available_stream_configs : pointer to stream configuration array
6112 * @scalar_format            : scalar format
6113 * @dim                      : configuration dimension
6114 * @config_type              : input or output configuration type
6115 *
6116 * RETURN     : NONE
6117 *==========================================================================*/
6118void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6119        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6120{
6121    available_stream_configs.add(scalar_format);
6122    available_stream_configs.add(dim.width);
6123    available_stream_configs.add(dim.height);
6124    available_stream_configs.add(config_type);
6125}
6126
6127/*===========================================================================
6128 * FUNCTION   : suppportBurstCapture
6129 *
6130 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6131 *
6132 * PARAMETERS :
6133 *   @cameraId  : camera Id
6134 *
6135 * RETURN     : true if camera supports BURST_CAPTURE
6136 *              false otherwise
6137 *==========================================================================*/
6138bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6139{
6140    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6141    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6142    const int32_t highResWidth = 3264;
6143    const int32_t highResHeight = 2448;
6144
6145    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6146        // Maximum resolution images cannot be captured at >= 10fps
6147        // -> not supporting BURST_CAPTURE
6148        return false;
6149    }
6150
6151    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6152        // Maximum resolution images can be captured at >= 20fps
6153        // --> supporting BURST_CAPTURE
6154        return true;
6155    }
6156
6157    // Find the smallest highRes resolution, or largest resolution if there is none
6158    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6159            MAX_SIZES_CNT);
6160    size_t highRes = 0;
6161    while ((highRes + 1 < totalCnt) &&
6162            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6163            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6164            highResWidth * highResHeight)) {
6165        highRes++;
6166    }
6167    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6168        return true;
6169    } else {
6170        return false;
6171    }
6172}
6173
6174/*===========================================================================
6175 * FUNCTION   : initStaticMetadata
6176 *
6177 * DESCRIPTION: initialize the static metadata
6178 *
6179 * PARAMETERS :
6180 *   @cameraId  : camera Id
6181 *
6182 * RETURN     : int32_t type of status
6183 *              0  -- success
6184 *              non-zero failure code
6185 *==========================================================================*/
6186int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6187{
6188    int rc = 0;
6189    CameraMetadata staticInfo;
6190    size_t count = 0;
6191    bool limitedDevice = false;
6192    char prop[PROPERTY_VALUE_MAX];
6193    bool supportBurst = false;
6194
6195    supportBurst = supportBurstCapture(cameraId);
6196
6197    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6198     * guaranteed or if min fps of max resolution is less than 20 fps, its
6199     * advertised as limited device*/
6200    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6201            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6202            !supportBurst;
6203
6204    uint8_t supportedHwLvl = limitedDevice ?
6205            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6206            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
6207
6208    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6209            &supportedHwLvl, 1);
6210
6211    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6212    /*HAL 3 only*/
6213    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6214                    &gCamCapability[cameraId]->min_focus_distance, 1);
6215
6216    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6217                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6218
6219    /*should be using focal lengths but sensor doesn't provide that info now*/
6220    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6221                      &gCamCapability[cameraId]->focal_length,
6222                      1);
6223
6224    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6225            gCamCapability[cameraId]->apertures,
6226            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6227
6228    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6229            gCamCapability[cameraId]->filter_densities,
6230            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6231
6232
6233    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6234            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6235            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6236
6237    int32_t lens_shading_map_size[] = {
6238            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6239            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6240    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6241                      lens_shading_map_size,
6242                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6243
6244    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6245            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6246
6247    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6248            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6249
6250    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6251            &gCamCapability[cameraId]->max_frame_duration, 1);
6252
6253    camera_metadata_rational baseGainFactor = {
6254            gCamCapability[cameraId]->base_gain_factor.numerator,
6255            gCamCapability[cameraId]->base_gain_factor.denominator};
6256    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6257                      &baseGainFactor, 1);
6258
6259    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6260                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6261
6262    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6263            gCamCapability[cameraId]->pixel_array_size.height};
6264    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6265                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6266
6267    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6268            gCamCapability[cameraId]->active_array_size.top,
6269            gCamCapability[cameraId]->active_array_size.width,
6270            gCamCapability[cameraId]->active_array_size.height};
6271    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6272            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6273
6274    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6275            &gCamCapability[cameraId]->white_level, 1);
6276
6277    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6278            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6279
6280    bool hasBlackRegions = false;
6281    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6282        LOGW("black_region_count: %d is bounded to %d",
6283            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6284        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6285    }
6286    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6287        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6288        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6289            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6290        }
6291        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6292                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6293        hasBlackRegions = true;
6294    }
6295
6296    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6297            &gCamCapability[cameraId]->flash_charge_duration, 1);
6298
6299    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6300            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6301
6302    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
6303    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6304            &timestampSource, 1);
6305
6306    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6307            &gCamCapability[cameraId]->histogram_size, 1);
6308
6309    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6310            &gCamCapability[cameraId]->max_histogram_count, 1);
6311
6312    int32_t sharpness_map_size[] = {
6313            gCamCapability[cameraId]->sharpness_map_size.width,
6314            gCamCapability[cameraId]->sharpness_map_size.height};
6315
6316    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6317            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6318
6319    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6320            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6321
6322    int32_t scalar_formats[] = {
6323            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6324            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6325            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6326            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6327            HAL_PIXEL_FORMAT_RAW10,
6328            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6329    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6330    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6331                      scalar_formats,
6332                      scalar_formats_count);
6333
6334    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6335    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6336    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6337            count, MAX_SIZES_CNT, available_processed_sizes);
6338    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6339            available_processed_sizes, count * 2);
6340
6341    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6342    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6343    makeTable(gCamCapability[cameraId]->raw_dim,
6344            count, MAX_SIZES_CNT, available_raw_sizes);
6345    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6346            available_raw_sizes, count * 2);
6347
6348    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6349    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6350    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6351            count, MAX_SIZES_CNT, available_fps_ranges);
6352    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6353            available_fps_ranges, count * 2);
6354
6355    camera_metadata_rational exposureCompensationStep = {
6356            gCamCapability[cameraId]->exp_compensation_step.numerator,
6357            gCamCapability[cameraId]->exp_compensation_step.denominator};
6358    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6359                      &exposureCompensationStep, 1);
6360
6361    Vector<uint8_t> availableVstabModes;
6362    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6363    char eis_prop[PROPERTY_VALUE_MAX];
6364    memset(eis_prop, 0, sizeof(eis_prop));
6365    property_get("persist.camera.eis.enable", eis_prop, "0");
6366    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6367    if (facingBack && eis_prop_set) {
6368        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6369    }
6370    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6371                      availableVstabModes.array(), availableVstabModes.size());
6372
6373    /*HAL 1 and HAL 3 common*/
6374    float maxZoom = 4;
6375    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6376            &maxZoom, 1);
6377
6378    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6379    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6380
6381    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6382    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6383        max3aRegions[2] = 0; /* AF not supported */
6384    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6385            max3aRegions, 3);
6386
6387    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6388    memset(prop, 0, sizeof(prop));
6389    property_get("persist.camera.facedetect", prop, "1");
6390    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6391    LOGD("Support face detection mode: %d",
6392             supportedFaceDetectMode);
6393
6394    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6395    Vector<uint8_t> availableFaceDetectModes;
6396    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6397    if (supportedFaceDetectMode == 1) {
6398        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6399    } else if (supportedFaceDetectMode == 2) {
6400        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6401    } else if (supportedFaceDetectMode == 3) {
6402        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6403        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6404    } else {
6405        maxFaces = 0;
6406    }
6407    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6408            availableFaceDetectModes.array(),
6409            availableFaceDetectModes.size());
6410    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6411            (int32_t *)&maxFaces, 1);
6412
6413    int32_t exposureCompensationRange[] = {
6414            gCamCapability[cameraId]->exposure_compensation_min,
6415            gCamCapability[cameraId]->exposure_compensation_max};
6416    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6417            exposureCompensationRange,
6418            sizeof(exposureCompensationRange)/sizeof(int32_t));
6419
6420    uint8_t lensFacing = (facingBack) ?
6421            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6422    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6423
6424    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6425                      available_thumbnail_sizes,
6426                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6427
6428    /*all sizes will be clubbed into this tag*/
6429    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6430    /*android.scaler.availableStreamConfigurations*/
6431    Vector<int32_t> available_stream_configs;
6432    cam_dimension_t active_array_dim;
6433    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6434    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6435    /* Add input/output stream configurations for each scalar formats*/
6436    for (size_t j = 0; j < scalar_formats_count; j++) {
6437        switch (scalar_formats[j]) {
6438        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
// Tail of the per-format switch (header is above this chunk) that populates
// available_stream_configs for ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
// NOTE(review): scalar_formats[j] / j come from the enclosing loop, not visible here.
6439        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6440        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats: one OUTPUT entry per supported raw dimension,
            // clamped to MAX_SIZES_CNT.
6441            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6442                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6443                addStreamConfig(available_stream_configs, scalar_formats[j],
6444                        gCamCapability[cameraId]->raw_dim[i],
6445                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6446            }
6447            break;
6448        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (BLOB): one OUTPUT entry per supported picture size.
6449            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6450                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6451                addStreamConfig(available_stream_configs, scalar_formats[j],
6452                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6453                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6454            }
6455            break;
6456        case HAL_PIXEL_FORMAT_YCbCr_420_888:
6457        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6458        default:
            // YUV / implementation-defined / anything else: advertise all picture
            // sizes as OUTPUT, and remember the largest size seen for the INPUT
            // (reprocess) entry below.
6459            cam_dimension_t largest_picture_size;
6460            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6461            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6462                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6463                addStreamConfig(available_stream_configs, scalar_formats[j],
6464                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6465                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6466                /* Book keep largest */
                // ">=" on both axes means the last table entry wins ties;
                // assumes the table contains a size that dominates in both
                // dimensions — TODO confirm against capability table ordering.
6467                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6468                        >= largest_picture_size.width &&
6469                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
6470                        >= largest_picture_size.height)
6471                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6472            }
6473            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
6474            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6475                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6476                 addStreamConfig(available_stream_configs, scalar_formats[j],
6477                         largest_picture_size,
6478                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6479            }
6480            break;
6481        }
6482    }
6483
// Publish the stream-configuration table assembled above, then the fixed
// hot-pixel defaults and the per-format minimum frame durations.
6484    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6485                      available_stream_configs.array(), available_stream_configs.size());
6486    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6487    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6488
6489    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6490    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6491
6492    /* android.scaler.availableMinFrameDurations */
    // Entries are flat 4-tuples: (format, width, height, min_duration).
6493    Vector<int64_t> available_min_durations;
6494    for (size_t j = 0; j < scalar_formats_count; j++) {
6495        switch (scalar_formats[j]) {
6496        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6497        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6498        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats use the raw dimension table and raw_min_duration.
6499            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6500                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6501                available_min_durations.add(scalar_formats[j]);
6502                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
6503                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
6504                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
6505            }
6506            break;
6507        default:
            // All other formats use the picture size table and picture_min_duration.
6508            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6509                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6510                available_min_durations.add(scalar_formats[j]);
6511                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
6512                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
6513                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
6514            }
6515            break;
6516        }
6517    }
6518    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6519                      available_min_durations.array(), available_min_durations.size());
6520
    // Translate the capability HFR table into
    // ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS entries.
6521    Vector<int32_t> available_hfr_configs;
6522    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        // Map the HFR mode enum to a numeric fps; unknown/OFF modes leave fps=0
        // and are filtered out by the MIN_FPS_FOR_BATCH_MODE check below.
6523        int32_t fps = 0;
6524        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6525        case CAM_HFR_MODE_60FPS:
6526            fps = 60;
6527            break;
6528        case CAM_HFR_MODE_90FPS:
6529            fps = 90;
6530            break;
6531        case CAM_HFR_MODE_120FPS:
6532            fps = 120;
6533            break;
6534        case CAM_HFR_MODE_150FPS:
6535            fps = 150;
6536            break;
6537        case CAM_HFR_MODE_180FPS:
6538            fps = 180;
6539            break;
6540        case CAM_HFR_MODE_210FPS:
6541            fps = 210;
6542            break;
6543        case CAM_HFR_MODE_240FPS:
6544            fps = 240;
6545            break;
6546        case CAM_HFR_MODE_480FPS:
6547            fps = 480;
6548            break;
6549        case CAM_HFR_MODE_OFF:
6550        case CAM_HFR_MODE_MAX:
6551        default:
6552            break;
6553        }
6554
6555        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6556        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6557            /* For each HFR frame rate, need to advertise one variable fps range
6558             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
6559             * [120, 120]. While camcorder preview alone is running [30, 120] is
6560             * set by the app. When video recording is started, [120, 120] is
6561             * set. This way sensor configuration does not change when recording
6562             * is started */
6563
            // Every default HFR video size that fits inside the sensor's HFR
            // dimension is advertised at this fps.
6564            size_t len = sizeof(default_hfr_video_sizes) / sizeof(default_hfr_video_sizes[0]);
6565            for (size_t j = 0; j < len; j++) {
6566                if ((default_hfr_video_sizes[j].width <= gCamCapability[cameraId]->hfr_tbl[i].dim.width) &&
6567                    (default_hfr_video_sizes[j].height <= gCamCapability[cameraId]->hfr_tbl[i].dim.height)) {
6568                    //TODO: Might need additional filtering based on VFE/CPP/CPU capabilities
6569
6570                    /* (width, height, fps_min, fps_max, batch_size_max) */
6571                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
6572                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
6573                    available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6574                    available_hfr_configs.add(fps);
6575                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6576
6577                    /* (width, height, fps_min, fps_max, batch_size_max) */
6578                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
6579                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
6580                    available_hfr_configs.add(fps);
6581                    available_hfr_configs.add(fps);
6582                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6583                }// if
6584            }// for (...; j < len;...)
6585       } //if (fps >= MIN_FPS_FOR_BATCH_MODE)
6586    }
6587    //Advertise HFR capability only if the property is set
    // persist.camera.hal3hfr.enable defaults to enabled ("1").
6588    memset(prop, 0, sizeof(prop));
6589    property_get("persist.camera.hal3hfr.enable", prop, "1");
6590    uint8_t hfrEnable = (uint8_t)atoi(prop);
6591
    // Skip the tag entirely when disabled or when no config qualified;
    // array() is NULL for an empty Vector.
6592    if(hfrEnable && available_hfr_configs.array()) {
6593        staticInfo.update(
6594                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6595                available_hfr_configs.array(), available_hfr_configs.size());
6596    }
6597
    // Max JPEG buffer size derived from sensor capabilities.
6598    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6599    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6600                      &max_jpeg_size, 1);
6601
    // Map HAL effect enums to framework values, dropping any that have no
    // framework equivalent (lookupFwkName returns NAME_NOT_FOUND).
6602    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6603    size_t size = 0;
6604    count = CAM_EFFECT_MODE_MAX;
6605    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6606    for (size_t i = 0; i < count; i++) {
6607        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6608                gCamCapability[cameraId]->supported_effects[i]);
6609        if (NAME_NOT_FOUND != val) {
6610            avail_effects[size] = (uint8_t)val;
6611            size++;
6612        }
6613    }
6614    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6615                      avail_effects,
6616                      size);
6617
    // Scene modes: translate HAL enums to framework enums, excluding OFF.
    // supported_indexes records the HAL-table index for each advertised mode so
    // makeOverridesList can pull the matching override triple.
6618    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6619    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6620    size_t supported_scene_modes_cnt = 0;
6621    count = CAM_SCENE_MODE_MAX;
6622    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6623    for (size_t i = 0; i < count; i++) {
6624        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6625                CAM_SCENE_MODE_OFF) {
6626            int val = lookupFwkName(SCENE_MODES_MAP,
6627                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
6628                    gCamCapability[cameraId]->supported_scene_modes[i]);
6629            if (NAME_NOT_FOUND != val) {
6630                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6631                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6632                supported_scene_modes_cnt++;
6633            }
6634        }
6635    }
6636    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6637                      avail_scene_modes,
6638                      supported_scene_modes_cnt);
6639
    // Overrides are (AE, AWB, AF) triples, one per advertised scene mode.
6640    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6641    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6642                      supported_scene_modes_cnt,
6643                      CAM_SCENE_MODE_MAX,
6644                      scene_mode_overrides,
6645                      supported_indexes,
6646                      cameraId);
6647
    // NOTE(review): when no scene modes exist we advertise DISABLED with
    // count 1, but scene_mode_overrides was built above with cnt==0, so the
    // 3 override bytes published below are uninitialized in that case —
    // verify makeOverridesList zero-fills, or initialize the array.
6648    if (supported_scene_modes_cnt == 0) {
6649        supported_scene_modes_cnt = 1;
6650        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6651    }
6652
6653    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6654            scene_mode_overrides, supported_scene_modes_cnt * 3);
6655
6656    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6657                                         ANDROID_CONTROL_MODE_AUTO,
6658                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6659    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6660            available_control_modes,
6661            3);
6662
    // Antibanding: HAL enum -> framework enum, unmapped values skipped.
6663    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6664    size = 0;
6665    count = CAM_ANTIBANDING_MODE_MAX;
6666    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6667    for (size_t i = 0; i < count; i++) {
6668        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6669                gCamCapability[cameraId]->supported_antibandings[i]);
6670        if (NAME_NOT_FOUND != val) {
6671            avail_antibanding_modes[size] = (uint8_t)val;
6672            size++;
6673        }
6674
6675    }
6676    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6677                      avail_antibanding_modes,
6678                      size);
6679
    // Chromatic aberration correction: the count only gates OFF-only (size 1)
    // vs all three modes (size 3); the HAL mode values themselves are not used.
6680    uint8_t avail_abberation_modes[] = {
6681            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
6682            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
6683            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
6684    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6685    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6686    if (0 == count) {
6687        //  If no aberration correction modes are available for a device, this advertise OFF mode
6688        size = 1;
6689    } else {
6690        // If count is not zero then atleast one among the FAST or HIGH quality is supported
6691        // So, advertize all 3 modes if atleast any one mode is supported as per the
6692        // new M requirement
6693        size = 3;
6694    }
6695    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6696            avail_abberation_modes,
6697            size);
6698
    // Autofocus modes: HAL enum -> framework enum.
6699    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6700    size = 0;
6701    count = CAM_FOCUS_MODE_MAX;
6702    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6703    for (size_t i = 0; i < count; i++) {
6704        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6705                gCamCapability[cameraId]->supported_focus_modes[i]);
6706        if (NAME_NOT_FOUND != val) {
6707            avail_af_modes[size] = (uint8_t)val;
6708            size++;
6709        }
6710    }
6711    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6712                      avail_af_modes,
6713                      size);
6714
    // White balance modes: HAL enum -> framework enum.
6715    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6716    size = 0;
6717    count = CAM_WB_MODE_MAX;
6718    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6719    for (size_t i = 0; i < count; i++) {
6720        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6721                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6722                gCamCapability[cameraId]->supported_white_balances[i]);
6723        if (NAME_NOT_FOUND != val) {
6724            avail_awb_modes[size] = (uint8_t)val;
6725            size++;
6726        }
6727    }
6728    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6729                      avail_awb_modes,
6730                      size);
6731
    // Flash firing power levels are copied through without translation.
6732    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6733    count = CAM_FLASH_FIRING_LEVEL_MAX;
6734    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6735            count);
6736    for (size_t i = 0; i < count; i++) {
6737        available_flash_levels[i] =
6738                gCamCapability[cameraId]->supported_firing_levels[i];
6739    }
6740    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6741            available_flash_levels, count);
6742
    // Flash availability gates the extra AE flash modes added below.
6743    uint8_t flashAvailable;
6744    if (gCamCapability[cameraId]->flash_available)
6745        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6746    else
6747        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6748    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6749            &flashAvailable, 1);
6750
6751    Vector<uint8_t> avail_ae_modes;
6752    count = CAM_AE_MODE_MAX;
6753    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6754    for (size_t i = 0; i < count; i++) {
        // NOTE(review): HAL AE mode values are added as-is; presumably they
        // coincide with the framework enum values — verify against AE_MODE map.
6755        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6756    }
6757    if (flashAvailable) {
6758        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6759        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6760    }
6761    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6762                      avail_ae_modes.array(),
6763                      avail_ae_modes.size());
6764
    // Sensor sensitivity (ISO) range and max analog sensitivity.
6765    int32_t sensitivity_range[2];
6766    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6767    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6768    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6769                      sensitivity_range,
6770                      sizeof(sensitivity_range) / sizeof(int32_t));
6771
6772    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6773                      &gCamCapability[cameraId]->max_analog_sensitivity,
6774                      1);
6775
6776    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6777    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6778                      &sensor_orientation,
6779                      1);
6780
    // Max simultaneous output streams: (stalling, processed, raw) — this
    // ordering must match the framework's expectation (raw, processed,
    // stalling is the documented tag order; verify the constants encode that).
6781    int32_t max_output_streams[] = {
6782            MAX_STALLING_STREAMS,
6783            MAX_PROCESSED_STREAMS,
6784            MAX_RAW_STREAMS};
6785    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6786            max_output_streams,
6787            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6788
    // No LEDs: tag published with count 0 (data pointer unused).
6789    uint8_t avail_leds = 0;
6790    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6791                      &avail_leds, 0);
6792
6793    uint8_t focus_dist_calibrated;
6794    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6795            gCamCapability[cameraId]->focus_dist_calibrated);
6796    if (NAME_NOT_FOUND != val) {
6797        focus_dist_calibrated = (uint8_t)val;
6798        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6799                     &focus_dist_calibrated, 1);
6800    }
6801
    // Sensor test patterns: HAL enum -> framework enum.
6802    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6803    size = 0;
6804    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6805            MAX_TEST_PATTERN_CNT);
6806    for (size_t i = 0; i < count; i++) {
6807        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6808                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6809        if (NAME_NOT_FOUND != testpatternMode) {
6810            avail_testpattern_modes[size] = testpatternMode;
6811            size++;
6812        }
6813    }
6814    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6815                      avail_testpattern_modes,
6816                      size);
6817
    // Pipeline depth = in-flight requests plus fixed pipeline/skip delays.
6818    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6819    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6820                      &max_pipeline_depth,
6821                      1);
6822
6823    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6824    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6825                      &partial_result_count,
6826                       1);
6827
6828    int32_t max_stall_duration = MAX_REPROCESS_STALL;
6829    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6830
    // Build ANDROID_REQUEST_AVAILABLE_CAPABILITIES: base set plus
    // conditionally BURST_CAPTURE, CONSTRAINED_HIGH_SPEED_VIDEO (same gate as
    // the HFR table above), and RAW for non-YUV sensors.
6831    Vector<uint8_t> available_capabilities;
6832    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6833    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6834    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6835    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6836    if (supportBurst) {
6837        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6838    }
6839    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6840    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6841    if (hfrEnable && available_hfr_configs.array()) {
6842        available_capabilities.add(
6843                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6844    }
6845
6846    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6847        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6848    }
6849    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6850            available_capabilities.array(),
6851            available_capabilities.size());
6852
6853    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
6854    //Assumption is that all bayer cameras support MANUAL_SENSOR.
6855    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6856            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6857
6858    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6859            &aeLockAvailable, 1);
6860
6861    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
6862    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
6863    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6864            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6865
6866    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6867            &awbLockAvailable, 1);
6868
    // Reprocessing supports a single input stream.
6869    int32_t max_input_streams = 1;
6870    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6871                      &max_input_streams,
6872                      1);
6873
6874    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
6875    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6876            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6877            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6878            HAL_PIXEL_FORMAT_YCbCr_420_888};
6879    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6880                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6881
    // Per-frame-control sync latency, ISP sensitivity boost range, and the
    // fixed post-processing mode lists (hot pixel, shading, lens shading map,
    // edge, noise reduction, tonemap, hot pixel map).
6882    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6883    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6884                      &max_latency,
6885                      1);
6886
6887    int32_t isp_sensitivity_range[2];
6888    isp_sensitivity_range[0] =
6889        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
6890    isp_sensitivity_range[1] =
6891        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
6892    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
6893                      isp_sensitivity_range,
6894                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
6895
6896    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6897                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6898    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6899            available_hot_pixel_modes,
6900            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6901
6902    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6903                                         ANDROID_SHADING_MODE_FAST,
6904                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
6905    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6906                      available_shading_modes,
6907                      3);
6908
6909    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6910                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6911    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6912                      available_lens_shading_map_modes,
6913                      2);
6914
6915    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6916                                      ANDROID_EDGE_MODE_FAST,
6917                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
6918                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6919    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6920            available_edge_modes,
6921            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6922
6923    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6924                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
6925                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6926                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6927                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6928    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6929            available_noise_red_modes,
6930            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6931
6932    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6933                                         ANDROID_TONEMAP_MODE_FAST,
6934                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6935    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6936            available_tonemap_modes,
6937            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6938
6939    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6940    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6941            available_hot_pixel_map_modes,
6942            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6943
    // DNG color-calibration data: reference illuminants (published only when
    // the HAL value maps to a framework enum) and the forward / color /
    // calibration transform matrices, cast to rational arrays.
6944    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6945            gCamCapability[cameraId]->reference_illuminant1);
6946    if (NAME_NOT_FOUND != val) {
6947        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6948        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6949    }
6950
6951    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6952            gCamCapability[cameraId]->reference_illuminant2);
6953    if (NAME_NOT_FOUND != val) {
6954        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6955        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6956    }
6957
6958    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6959            (void *)gCamCapability[cameraId]->forward_matrix1,
6960            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6961
6962    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6963            (void *)gCamCapability[cameraId]->forward_matrix2,
6964            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6965
6966    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6967            (void *)gCamCapability[cameraId]->color_transform1,
6968            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6969
6970    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6971            (void *)gCamCapability[cameraId]->color_transform2,
6972            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6973
6974    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6975            (void *)gCamCapability[cameraId]->calibration_transform1,
6976            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6977
6978    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
6979            (void *)gCamCapability[cameraId]->calibration_transform2,
6980            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6981
    // ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS: the fixed base list, plus
    // AF regions when the device has more than one focus mode.
6982    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6983       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6984       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6985       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6986       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6987       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6988       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6989       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6990       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6991       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6992       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6993       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6994       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6995       ANDROID_JPEG_GPS_COORDINATES,
6996       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6997       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6998       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6999       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7000       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7001       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7002       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7003       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7004       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7005       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7006       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7007       ANDROID_STATISTICS_FACE_DETECT_MODE,
7008       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7009       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7010       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7011       ANDROID_BLACK_LEVEL_LOCK };
7012
7013    size_t request_keys_cnt =
7014            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7015    Vector<int32_t> available_request_keys;
7016    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions only make sense when focus is controllable (>1 focus mode,
    // i.e. not fixed-focus).
7017    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7018        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7019    }
7020
7021    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7022            available_request_keys.array(), available_request_keys.size());
7023
    // ANDROID_REQUEST_AVAILABLE_RESULT_KEYS: base list plus conditional keys
    // for controllable focus, RAW sensors, and face-detect support.
    // NOTE(review): ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST appears twice
    // in this list (mid-list and as the last entry) — harmless duplicate,
    // but one could be dropped.
7024    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7025       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7026       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7027       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7028       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7029       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7030       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7031       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7032       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7033       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7034       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7035       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7036       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7037       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7038       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7039       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7040       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7041       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7042       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7043       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7044       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7045       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7046       ANDROID_STATISTICS_FACE_SCORES,
7047       ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
7048       ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7049       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
7050    size_t result_keys_cnt =
7051            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7052
7053    Vector<int32_t> available_result_keys;
7054    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7055    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7056        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7057    }
7058    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7059        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7060        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7061    }
    // supportedFaceDetectMode is computed above this chunk; presumably
    // 1 = SIMPLE (rectangles/scores), 2-3 = FULL (ids/landmarks) — TODO confirm.
    // NOTE(review): FACE_RECTANGLES / FACE_SCORES are already in the basic
    // list, so the mode==1 adds duplicate them.
7062    if (supportedFaceDetectMode == 1) {
7063        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7064        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7065    } else if ((supportedFaceDetectMode == 2) ||
7066            (supportedFaceDetectMode == 3)) {
7067        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7068        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7069    }
7070    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7071            available_result_keys.array(), available_result_keys.size());
7072
    // ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS base list.
    // NOTE: this initializer continues beyond the end of this chunk; the
    // closing brace and the staticInfo.update() call are below it.
7073    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7074       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7075       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7076       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7077       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7078       ANDROID_SCALER_CROPPING_TYPE,
7079       ANDROID_SYNC_MAX_LATENCY,
7080       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7081       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7082       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7083       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7084       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7085       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7086       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7087       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7088       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7089       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7090       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7091       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7092       ANDROID_LENS_FACING,
7093       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7094       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7095       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7096       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7097       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7098       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7099       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7100       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7101       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7102       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7103       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7104       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7105       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7106       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7107       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7108       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7109       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7110       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7111       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7112       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7113       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7114       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7115       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7116       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7117       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7118       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7119       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7120       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7121       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7122       ANDROID_CONTROL_AVAILABLE_MODES,
7123       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7124       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7125       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7126       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7127       ANDROID_SHADING_AVAILABLE_MODES,
7128       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7129       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7130
7131    Vector<int32_t> available_characteristics_keys;
7132    available_characteristics_keys.appendArray(characteristics_keys_basic,
7133            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7134    if (hasBlackRegions) {
7135        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7136    }
7137    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7138                      available_characteristics_keys.array(),
7139                      available_characteristics_keys.size());
7140
7141    /*available stall durations depend on the hw + sw and will be different for different devices */
7142    /*have to add for raw after implementation*/
7143    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7144    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7145
7146    Vector<int64_t> available_stall_durations;
7147    for (uint32_t j = 0; j < stall_formats_count; j++) {
7148        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7149            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7150                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7151                available_stall_durations.add(stall_formats[j]);
7152                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7153                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7154                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7155          }
7156        } else {
7157            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7158                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7159                available_stall_durations.add(stall_formats[j]);
7160                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7161                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7162                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7163            }
7164        }
7165    }
7166    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7167                      available_stall_durations.array(),
7168                      available_stall_durations.size());
7169
7170    //QCAMERA3_OPAQUE_RAW
7171    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7172    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7173    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7174    case LEGACY_RAW:
7175        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7176            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7177        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7178            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7179        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7180            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7181        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7182        break;
7183    case MIPI_RAW:
7184        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7185            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7186        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7187            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7188        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7189            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7190        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7191        break;
7192    default:
7193        LOGE("unknown opaque_raw_format %d",
7194                gCamCapability[cameraId]->opaque_raw_fmt);
7195        break;
7196    }
7197    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7198
7199    Vector<int32_t> strides;
7200    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7201            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7202        cam_stream_buf_plane_info_t buf_planes;
7203        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7204        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7205        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7206            &gCamCapability[cameraId]->padding_info, &buf_planes);
7207        strides.add(buf_planes.plane_info.mp[0].stride);
7208    }
7209    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7210            strides.size());
7211
7212    Vector<int32_t> opaque_size;
7213    for (size_t j = 0; j < scalar_formats_count; j++) {
7214        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7215            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7216                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7217                cam_stream_buf_plane_info_t buf_planes;
7218
7219                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7220                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7221
7222                if (rc == 0) {
7223                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7224                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7225                    opaque_size.add(buf_planes.plane_info.frame_len);
7226                }else {
7227                    LOGE("raw frame calculation failed!");
7228                }
7229            }
7230        }
7231    }
7232
7233    if ((opaque_size.size() > 0) &&
7234            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7235        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7236    else
7237        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7238
7239    gStaticMetadata[cameraId] = staticInfo.release();
7240    return rc;
7241}
7242
7243/*===========================================================================
7244 * FUNCTION   : makeTable
7245 *
7246 * DESCRIPTION: make a table of sizes
7247 *
7248 * PARAMETERS :
7249 *
7250 *
7251 *==========================================================================*/
7252void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7253        size_t max_size, int32_t *sizeTable)
7254{
7255    size_t j = 0;
7256    if (size > max_size) {
7257       size = max_size;
7258    }
7259    for (size_t i = 0; i < size; i++) {
7260        sizeTable[j] = dimTable[i].width;
7261        sizeTable[j+1] = dimTable[i].height;
7262        j+=2;
7263    }
7264}
7265
7266/*===========================================================================
7267 * FUNCTION   : makeFPSTable
7268 *
7269 * DESCRIPTION: make a table of fps ranges
7270 *
7271 * PARAMETERS :
7272 *
7273 *==========================================================================*/
7274void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7275        size_t max_size, int32_t *fpsRangesTable)
7276{
7277    size_t j = 0;
7278    if (size > max_size) {
7279       size = max_size;
7280    }
7281    for (size_t i = 0; i < size; i++) {
7282        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7283        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7284        j+=2;
7285    }
7286}
7287
7288/*===========================================================================
7289 * FUNCTION   : makeOverridesList
7290 *
7291 * DESCRIPTION: make a list of scene mode overrides
7292 *
7293 * PARAMETERS :
7294 *
7295 *
7296 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout: 3 bytes per supported scene mode -- (ae, awb, af) --
    // matching the ANDROID_CONTROL_SCENE_MODE_OVERRIDES packing.
    size_t j = 0;
    if (size > max_size) {   // clamp to the caller-provided output capacity
       size = max_size;
    }
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th fwk-supported scene mode to its
        // slot in the daemon's full overridesTable.
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the unit has a flash.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        // NOTE: on NAME_NOT_FOUND the awb byte is left untouched
        // (whatever value the caller's buffer already held).
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only honor the daemon's focus mode if this sensor
        // actually supports it; otherwise fall back to AF_MODE_OFF.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
7341
7342/*===========================================================================
7343 * FUNCTION   : filterJpegSizes
7344 *
7345 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7346 *              could be downscaled to
7347 *
7348 * PARAMETERS :
7349 *
7350 * RETURN     : length of jpegSizes array
7351 *==========================================================================*/
7352
7353size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7354        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7355        uint8_t downscale_factor)
7356{
7357    if (0 == downscale_factor) {
7358        downscale_factor = 1;
7359    }
7360
7361    int32_t min_width = active_array_size.width / downscale_factor;
7362    int32_t min_height = active_array_size.height / downscale_factor;
7363    size_t jpegSizesCnt = 0;
7364    if (processedSizesCnt > maxCount) {
7365        processedSizesCnt = maxCount;
7366    }
7367    for (size_t i = 0; i < processedSizesCnt; i+=2) {
7368        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7369            jpegSizes[jpegSizesCnt] = processedSizes[i];
7370            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7371            jpegSizesCnt += 2;
7372        }
7373    }
7374    return jpegSizesCnt;
7375}
7376
7377/*===========================================================================
 * FUNCTION   : getScalarFormat
7379 *
7380 * DESCRIPTION: convert the format to type recognized by framework
7381 *
7382 * PARAMETERS : format : the format from backend
7383 *
 * RETURN     : format recognized by framework
7385 *
7386 *==========================================================================*/
7387int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
7388{
7389    int32_t halPixelFormat;
7390
7391    switch (format) {
7392    case CAM_FORMAT_YUV_420_NV12:
7393        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
7394        break;
7395    case CAM_FORMAT_YUV_420_NV21:
7396        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7397        break;
7398    case CAM_FORMAT_YUV_420_NV21_ADRENO:
7399        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
7400        break;
7401    case CAM_FORMAT_YUV_420_YV12:
7402        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
7403        break;
7404    case CAM_FORMAT_YUV_422_NV16:
7405    case CAM_FORMAT_YUV_422_NV61:
7406    default:
7407        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7408        break;
7409    }
7410    return halPixelFormat;
7411}
7412
7413/*===========================================================================
7414 * FUNCTION   : computeNoiseModelEntryS
7415 *
7416 * DESCRIPTION: function to map a given sensitivity to the S noise
7417 *              model parameters in the DNG noise model.
7418 *
7419 * PARAMETERS : sens : the sensor sensitivity
7420 *
7421 ** RETURN    : S (sensor amplification) noise
7422 *
7423 *==========================================================================*/
7424double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7425    double s = gCamCapability[mCameraId]->gradient_S * sens +
7426            gCamCapability[mCameraId]->offset_S;
7427    return ((s < 0.0) ? 0.0 : s);
7428}
7429
7430/*===========================================================================
7431 * FUNCTION   : computeNoiseModelEntryO
7432 *
7433 * DESCRIPTION: function to map a given sensitivity to the O noise
7434 *              model parameters in the DNG noise model.
7435 *
7436 * PARAMETERS : sens : the sensor sensitivity
7437 *
7438 ** RETURN    : O (sensor readout) noise
7439 *
7440 *==========================================================================*/
7441double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7442    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7443    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7444            1.0 : (1.0 * sens / max_analog_sens);
7445    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7446            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7447    return ((o < 0.0) ? 0.0 : o);
7448}
7449
7450/*===========================================================================
7451 * FUNCTION   : getSensorSensitivity
7452 *
7453 * DESCRIPTION: convert iso_mode to an integer value
7454 *
7455 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7456 *
7457 ** RETURN    : sensitivity supported by sensor
7458 *
7459 *==========================================================================*/
7460int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7461{
7462    int32_t sensitivity;
7463
7464    switch (iso_mode) {
7465    case CAM_ISO_MODE_100:
7466        sensitivity = 100;
7467        break;
7468    case CAM_ISO_MODE_200:
7469        sensitivity = 200;
7470        break;
7471    case CAM_ISO_MODE_400:
7472        sensitivity = 400;
7473        break;
7474    case CAM_ISO_MODE_800:
7475        sensitivity = 800;
7476        break;
7477    case CAM_ISO_MODE_1600:
7478        sensitivity = 1600;
7479        break;
7480    default:
7481        sensitivity = -1;
7482        break;
7483    }
7484    return sensitivity;
7485}
7486
7487/*===========================================================================
7488 * FUNCTION   : getCamInfo
7489 *
7490 * DESCRIPTION: query camera capabilities
7491 *
7492 * PARAMETERS :
7493 *   @cameraId  : camera Id
7494 *   @info      : camera info struct to be filled in with camera capabilities
7495 *
7496 * RETURN     : int type of status
7497 *              NO_ERROR  -- success
 *              non-zero failure code
7499 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock guards the lazily-initialized per-camera capability and
    // static-metadata caches; every return path below must unlock it.
    pthread_mutex_lock(&gCamLock);
    // Query capabilities from the backend on first use only.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Build the static metadata blob on first use only.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate sensor position into the framework's facing enum. An
    // unknown position still fills in the rest of info but reports failure.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // This HAL implements the camera device API at version 3.3.
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest max_fps across all advertised fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
7567
7568/*===========================================================================
7569 * FUNCTION   : translateCapabilityToMetadata
7570 *
7571 * DESCRIPTION: translate the capability into camera_metadata_t
7572 *
7573 * PARAMETERS : type of the request
7574 *
7575 *
7576 * RETURN     : success: camera_metadata_t*
7577 *              failure: NULL
7578 *
7579 *==========================================================================*/
7580camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7581{
7582    if (mDefaultMetadata[type] != NULL) {
7583        return mDefaultMetadata[type];
7584    }
7585    //first time we are handling this request
7586    //fill up the metadata structure using the wrapper class
7587    CameraMetadata settings;
7588    //translate from cam_capability_t to camera_metadata_tag_t
7589    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7590    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7591    int32_t defaultRequestID = 0;
7592    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7593
7594    /* OIS disable */
7595    char ois_prop[PROPERTY_VALUE_MAX];
7596    memset(ois_prop, 0, sizeof(ois_prop));
7597    property_get("persist.camera.ois.disable", ois_prop, "0");
7598    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7599
7600    /* Force video to use OIS */
7601    char videoOisProp[PROPERTY_VALUE_MAX];
7602    memset(videoOisProp, 0, sizeof(videoOisProp));
7603    property_get("persist.camera.ois.video", videoOisProp, "1");
7604    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7605
7606    // EIS enable/disable
7607    char eis_prop[PROPERTY_VALUE_MAX];
7608    memset(eis_prop, 0, sizeof(eis_prop));
7609    property_get("persist.camera.eis.enable", eis_prop, "0");
7610    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7611
7612    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7613    // This is a bit hacky. EIS is enabled only when the above setprop
7614    // is set to non-zero value and on back camera (for 2015 Nexus).
7615    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7616    // configureStream is called before this function. In other words,
7617    // we cannot guarantee the app will call configureStream before
7618    // calling createDefaultRequest.
7619    const bool eisEnabled = facingBack && eis_prop_set;
7620
7621    uint8_t controlIntent = 0;
7622    uint8_t focusMode;
7623    uint8_t vsMode;
7624    uint8_t optStabMode;
7625    uint8_t cacMode;
7626    uint8_t edge_mode;
7627    uint8_t noise_red_mode;
7628    uint8_t tonemap_mode;
7629    bool highQualityModeEntryAvailable = FALSE;
7630    bool fastModeEntryAvailable = FALSE;
7631    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7632    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7633    switch (type) {
7634      case CAMERA3_TEMPLATE_PREVIEW:
7635        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7636        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7637        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7638        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7639        edge_mode = ANDROID_EDGE_MODE_FAST;
7640        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7641        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7642        break;
7643      case CAMERA3_TEMPLATE_STILL_CAPTURE:
7644        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7645        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7646        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7647        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7648        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7649        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7650        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7651        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
7652        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
7653            if (gCamCapability[mCameraId]->aberration_modes[i] ==
7654                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
7655                highQualityModeEntryAvailable = TRUE;
7656            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
7657                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
7658                fastModeEntryAvailable = TRUE;
7659            }
7660        }
7661        if (highQualityModeEntryAvailable) {
7662            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7663        } else if (fastModeEntryAvailable) {
7664            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7665        }
7666        break;
7667      case CAMERA3_TEMPLATE_VIDEO_RECORD:
7668        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7669        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7670        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7671        if (eisEnabled) {
7672            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7673        }
7674        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7675        edge_mode = ANDROID_EDGE_MODE_FAST;
7676        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7677        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7678        if (forceVideoOis)
7679            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7680        break;
7681      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7682        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7683        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7684        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7685        if (eisEnabled) {
7686            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7687        }
7688        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7689        edge_mode = ANDROID_EDGE_MODE_FAST;
7690        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7691        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7692        if (forceVideoOis)
7693            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7694        break;
7695      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7696        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7697        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7698        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7699        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7700        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7701        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7702        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7703        break;
7704      case CAMERA3_TEMPLATE_MANUAL:
7705        edge_mode = ANDROID_EDGE_MODE_FAST;
7706        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7707        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7708        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7709        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7710        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7711        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7712        break;
7713      default:
7714        edge_mode = ANDROID_EDGE_MODE_FAST;
7715        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7716        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7717        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7718        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7719        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7720        break;
7721    }
7722    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7723    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7724    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7725    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7726        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7727    }
7728    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7729
7730    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7731            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7732        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7733    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7734            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7735            || ois_disable)
7736        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7737    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7738
7739    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7740            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7741
7742    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7743    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7744
7745    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7746    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7747
7748    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7749    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7750
7751    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7752    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7753
7754    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7755    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7756
7757    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7758    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7759
7760    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7761    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7762
7763    /*flash*/
7764    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7765    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7766
7767    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7768    settings.update(ANDROID_FLASH_FIRING_POWER,
7769            &flashFiringLevel, 1);
7770
7771    /* lens */
7772    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7773    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7774
7775    if (gCamCapability[mCameraId]->filter_densities_count) {
7776        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7777        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7778                        gCamCapability[mCameraId]->filter_densities_count);
7779    }
7780
7781    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7782    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7783
7784    float default_focus_distance = 0;
7785    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7786
7787    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7788    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7789
7790    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7791    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7792
7793    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7794    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7795
7796    /* face detection (default to OFF) */
7797    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7798    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7799
7800    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7801    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7802
7803    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7804    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7805
7806    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7807    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7808
7809    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7810    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7811
7812    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7813    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7814
7815    /* Exposure time(Update the Min Exposure Time)*/
7816    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7817    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7818
7819    /* frame duration */
7820    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7821    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7822
7823    /* sensitivity */
7824    static const int32_t default_sensitivity = 100;
7825    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7826
7827    /*edge mode*/
7828    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7829
7830    /*noise reduction mode*/
7831    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7832
7833    /*color correction mode*/
7834    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7835    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7836
7837    /*transform matrix mode*/
7838    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7839
7840    int32_t scaler_crop_region[4];
7841    scaler_crop_region[0] = 0;
7842    scaler_crop_region[1] = 0;
7843    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7844    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7845    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7846
7847    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7848    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7849
7850    /*focus distance*/
7851    float focus_distance = 0.0;
7852    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7853
7854    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7855    float max_range = 0.0;
7856    float max_fixed_fps = 0.0;
7857    int32_t fps_range[2] = {0, 0};
7858    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7859            i++) {
7860        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7861            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7862        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7863                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7864                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7865            if (range > max_range) {
7866                fps_range[0] =
7867                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7868                fps_range[1] =
7869                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7870                max_range = range;
7871            }
7872        } else {
7873            if (range < 0.01 && max_fixed_fps <
7874                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7875                fps_range[0] =
7876                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7877                fps_range[1] =
7878                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7879                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7880            }
7881        }
7882    }
7883    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7884
7885    /*precapture trigger*/
7886    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7887    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7888
7889    /*af trigger*/
7890    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7891    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7892
7893    /* ae & af regions */
7894    int32_t active_region[] = {
7895            gCamCapability[mCameraId]->active_array_size.left,
7896            gCamCapability[mCameraId]->active_array_size.top,
7897            gCamCapability[mCameraId]->active_array_size.left +
7898                    gCamCapability[mCameraId]->active_array_size.width,
7899            gCamCapability[mCameraId]->active_array_size.top +
7900                    gCamCapability[mCameraId]->active_array_size.height,
7901            0};
7902    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7903            sizeof(active_region) / sizeof(active_region[0]));
7904    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7905            sizeof(active_region) / sizeof(active_region[0]));
7906
7907    /* black level lock */
7908    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7909    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7910
7911    /* lens shading map mode */
7912    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7913    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7914        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7915    }
7916    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7917
7918    //special defaults for manual template
7919    if (type == CAMERA3_TEMPLATE_MANUAL) {
7920        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7921        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7922
7923        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7924        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7925
7926        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7927        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7928
7929        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7930        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7931
7932        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7933        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7934
7935        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7936        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7937    }
7938
7939
7940    /* TNR
7941     * We'll use this location to determine which modes TNR will be set.
7942     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7943     * This is not to be confused with linking on a per stream basis that decision
7944     * is still on per-session basis and will be handled as part of config stream
7945     */
7946    uint8_t tnr_enable = 0;
7947
7948    if (m_bTnrPreview || m_bTnrVideo) {
7949
7950        switch (type) {
7951            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7952            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7953                    tnr_enable = 1;
7954                    break;
7955
7956            default:
7957                    tnr_enable = 0;
7958                    break;
7959        }
7960
7961        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7962        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7963        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7964
7965        LOGD("TNR:%d with process plate %d for template:%d",
7966                             tnr_enable, tnr_process_type, type);
7967    }
7968
7969    /* CDS default */
7970    char prop[PROPERTY_VALUE_MAX];
7971    memset(prop, 0, sizeof(prop));
7972    property_get("persist.camera.CDS", prop, "Auto");
7973    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7974    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7975    if (CAM_CDS_MODE_MAX == cds_mode) {
7976        cds_mode = CAM_CDS_MODE_AUTO;
7977    }
7978
7979    /* Disabling CDS in templates which have TNR enabled*/
7980    if (tnr_enable)
7981        cds_mode = CAM_CDS_MODE_OFF;
7982
7983    int32_t mode = cds_mode;
7984    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7985    mDefaultMetadata[type] = settings.release();
7986
7987    return mDefaultMetadata[type];
7988}
7989
7990/*===========================================================================
7991 * FUNCTION   : setFrameParameters
7992 *
7993 * DESCRIPTION: set parameters per frame as requested in the metadata from
7994 *              framework
7995 *
7996 * PARAMETERS :
7997 *   @request   : request that needs to be serviced
7998 *   @streamID : Stream ID of all the requested streams
7999 *   @blob_request: Whether this request is a blob request or not
8000 *
8001 * RETURN     : success: NO_ERROR
 *              failure: error code (e.g. BAD_VALUE)
8003 *==========================================================================*/
8004int QCamera3HardwareInterface::setFrameParameters(
8005                    camera3_capture_request_t *request,
8006                    cam_stream_ID_t streamID,
8007                    int blob_request,
8008                    uint32_t snapshotStreamId)
8009{
8010    /*translate from camera_metadata_t type to parm_type_t*/
8011    int rc = 0;
8012    int32_t hal_version = CAM_HAL_V3;
8013
8014    clear_metadata_buffer(mParameters);
8015    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8016        LOGE("Failed to set hal version in the parameters");
8017        return BAD_VALUE;
8018    }
8019
8020    /*we need to update the frame number in the parameters*/
8021    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8022            request->frame_number)) {
8023        LOGE("Failed to set the frame number in the parameters");
8024        return BAD_VALUE;
8025    }
8026
8027    /* Update stream id of all the requested buffers */
8028    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8029        LOGE("Failed to set stream type mask in the parameters");
8030        return BAD_VALUE;
8031    }
8032
8033    if (mUpdateDebugLevel) {
8034        uint32_t dummyDebugLevel = 0;
8035        /* The value of dummyDebugLevel is irrelavent. On
8036         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8037        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8038                dummyDebugLevel)) {
8039            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8040            return BAD_VALUE;
8041        }
8042        mUpdateDebugLevel = false;
8043    }
8044
8045    if(request->settings != NULL){
8046        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8047        if (blob_request)
8048            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8049    }
8050
8051    return rc;
8052}
8053
8054/*===========================================================================
8055 * FUNCTION   : setReprocParameters
8056 *
8057 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8058 *              return it.
8059 *
8060 * PARAMETERS :
8061 *   @request   : request that needs to be serviced
8062 *
8063 * RETURN     : success: NO_ERROR
 *              failure: error code (e.g. BAD_VALUE)
8065 *==========================================================================*/
8066int32_t QCamera3HardwareInterface::setReprocParameters(
8067        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8068        uint32_t snapshotStreamId)
8069{
8070    /*translate from camera_metadata_t type to parm_type_t*/
8071    int rc = 0;
8072
8073    if (NULL == request->settings){
8074        LOGE("Reprocess settings cannot be NULL");
8075        return BAD_VALUE;
8076    }
8077
8078    if (NULL == reprocParam) {
8079        LOGE("Invalid reprocessing metadata buffer");
8080        return BAD_VALUE;
8081    }
8082    clear_metadata_buffer(reprocParam);
8083
8084    /*we need to update the frame number in the parameters*/
8085    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8086            request->frame_number)) {
8087        LOGE("Failed to set the frame number in the parameters");
8088        return BAD_VALUE;
8089    }
8090
8091    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8092    if (rc < 0) {
8093        LOGE("Failed to translate reproc request");
8094        return rc;
8095    }
8096
8097    CameraMetadata frame_settings;
8098    frame_settings = request->settings;
8099    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8100            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8101        int32_t *crop_count =
8102                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8103        int32_t *crop_data =
8104                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8105        int32_t *roi_map =
8106                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8107        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8108            cam_crop_data_t crop_meta;
8109            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8110            crop_meta.num_of_streams = 1;
8111            crop_meta.crop_info[0].crop.left   = crop_data[0];
8112            crop_meta.crop_info[0].crop.top    = crop_data[1];
8113            crop_meta.crop_info[0].crop.width  = crop_data[2];
8114            crop_meta.crop_info[0].crop.height = crop_data[3];
8115
8116            crop_meta.crop_info[0].roi_map.left =
8117                    roi_map[0];
8118            crop_meta.crop_info[0].roi_map.top =
8119                    roi_map[1];
8120            crop_meta.crop_info[0].roi_map.width =
8121                    roi_map[2];
8122            crop_meta.crop_info[0].roi_map.height =
8123                    roi_map[3];
8124
8125            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8126                rc = BAD_VALUE;
8127            }
8128            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8129                    request->input_buffer->stream,
8130                    crop_meta.crop_info[0].crop.left,
8131                    crop_meta.crop_info[0].crop.top,
8132                    crop_meta.crop_info[0].crop.width,
8133                    crop_meta.crop_info[0].crop.height);
8134            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8135                    request->input_buffer->stream,
8136                    crop_meta.crop_info[0].roi_map.left,
8137                    crop_meta.crop_info[0].roi_map.top,
8138                    crop_meta.crop_info[0].roi_map.width,
8139                    crop_meta.crop_info[0].roi_map.height);
8140            } else {
8141                LOGE("Invalid reprocess crop count %d!", *crop_count);
8142            }
8143    } else {
8144        LOGE("No crop data from matching output stream");
8145    }
8146
8147    /* These settings are not needed for regular requests so handle them specially for
8148       reprocess requests; information needed for EXIF tags */
8149    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8150        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8151                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8152        if (NAME_NOT_FOUND != val) {
8153            uint32_t flashMode = (uint32_t)val;
8154            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8155                rc = BAD_VALUE;
8156            }
8157        } else {
8158            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8159                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8160        }
8161    } else {
8162        LOGH("No flash mode in reprocess settings");
8163    }
8164
8165    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8166        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8167        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8168            rc = BAD_VALUE;
8169        }
8170    } else {
8171        LOGH("No flash state in reprocess settings");
8172    }
8173
8174    return rc;
8175}
8176
8177/*===========================================================================
8178 * FUNCTION   : saveRequestSettings
8179 *
8180 * DESCRIPTION: Add any settings that might have changed to the request settings
8181 *              and save the settings to be applied on the frame
8182 *
8183 * PARAMETERS :
8184 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8185 *   @request      : request with initial settings
8186 *
8187 * RETURN     :
8188 * camera_metadata_t* : pointer to the saved request settings
8189 *==========================================================================*/
8190camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8191        const CameraMetadata &jpegMetadata,
8192        camera3_capture_request_t *request)
8193{
8194    camera_metadata_t *resultMetadata;
8195    CameraMetadata camMetadata;
8196    camMetadata = request->settings;
8197
8198    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8199        int32_t thumbnail_size[2];
8200        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8201        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8202        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8203                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8204    }
8205
8206    resultMetadata = camMetadata.release();
8207    return resultMetadata;
8208}
8209
8210/*===========================================================================
8211 * FUNCTION   : setHalFpsRange
8212 *
8213 * DESCRIPTION: set FPS range parameter
8214 *
8215 *
8216 * PARAMETERS :
8217 *   @settings    : Metadata from framework
8218 *   @hal_metadata: Metadata buffer
8219 *
8220 *
8221 * RETURN     : success: NO_ERROR
8222 *              failure:
8223 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    // Translates ANDROID_CONTROL_AE_TARGET_FPS_RANGE into the HAL fps-range
    // parameter, and derives HFR mode + preview batch size (mBatchSize) when
    // the session is in constrained high-speed mode.
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // 'settings' -- find() on an absent tag yields an empty entry. Verify
    // that callers check exists() before invoking this.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps mirrors the AE target range by default; the HFR branch below
    // overrides the minimums.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batch mode is only engaged in constrained high-speed sessions; reset
    // unconditionally so a prior HFR session does not leak its batch size.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR runs the sensor at a fixed rate: pin both minimums to the max
        // (see the table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto a discrete HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // One preview frame is delivered per batch of video frames,
                // capped at the maximum supported batch size.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
8317
8318/*===========================================================================
8319 * FUNCTION   : translateToHalMetadata
8320 *
8321 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8322 *
8323 *
8324 * PARAMETERS :
8325 *   @request  : request sent from framework
8326 *
8327 *
8328 * RETURN     : success: NO_ERROR
8329 *              failure:
8330 *==========================================================================*/
8331int QCamera3HardwareInterface::translateToHalMetadata
8332                                  (const camera3_capture_request_t *request,
8333                                   metadata_buffer_t *hal_metadata,
8334                                   uint32_t snapshotStreamId)
8335{
8336    int rc = 0;
8337    CameraMetadata frame_settings;
8338    frame_settings = request->settings;
8339
8340    /* Do not change the order of the following list unless you know what you are
8341     * doing.
8342     * The order is laid out in such a way that parameters in the front of the table
8343     * may be used to override the parameters later in the table. Examples are:
8344     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
8346     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
8347     * 4. Any mode should precede it's corresponding settings
8348     */
8349    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8350        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8351        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8352            rc = BAD_VALUE;
8353        }
8354        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8355        if (rc != NO_ERROR) {
8356            LOGE("extractSceneMode failed");
8357        }
8358    }
8359
8360    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8361        uint8_t fwk_aeMode =
8362            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8363        uint8_t aeMode;
8364        int32_t redeye;
8365
8366        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8367            aeMode = CAM_AE_MODE_OFF;
8368        } else {
8369            aeMode = CAM_AE_MODE_ON;
8370        }
8371        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8372            redeye = 1;
8373        } else {
8374            redeye = 0;
8375        }
8376
8377        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8378                fwk_aeMode);
8379        if (NAME_NOT_FOUND != val) {
8380            int32_t flashMode = (int32_t)val;
8381            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8382        }
8383
8384        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8385        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8386            rc = BAD_VALUE;
8387        }
8388    }
8389
8390    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8391        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8392        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8393                fwk_whiteLevel);
8394        if (NAME_NOT_FOUND != val) {
8395            uint8_t whiteLevel = (uint8_t)val;
8396            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8397                rc = BAD_VALUE;
8398            }
8399        }
8400    }
8401
8402    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8403        uint8_t fwk_cacMode =
8404                frame_settings.find(
8405                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8406        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8407                fwk_cacMode);
8408        if (NAME_NOT_FOUND != val) {
8409            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8410            bool entryAvailable = FALSE;
8411            // Check whether Frameworks set CAC mode is supported in device or not
8412            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8413                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
8414                    entryAvailable = TRUE;
8415                    break;
8416                }
8417            }
8418            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
8419            // If entry not found then set the device supported mode instead of frameworks mode i.e,
8420            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
8421            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
8422            if (entryAvailable == FALSE) {
8423                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8424                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8425                } else {
8426                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8427                        // High is not supported and so set the FAST as spec say's underlying
8428                        // device implementation can be the same for both modes.
8429                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
8430                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8431                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
8432                        // in order to avoid the fps drop due to high quality
8433                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8434                    } else {
8435                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8436                    }
8437                }
8438            }
8439            LOGD("Final cacMode is %d", cacMode);
8440            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8441                rc = BAD_VALUE;
8442            }
8443        } else {
8444            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
8445        }
8446    }
8447
8448    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8449        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8450        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8451                fwk_focusMode);
8452        if (NAME_NOT_FOUND != val) {
8453            uint8_t focusMode = (uint8_t)val;
8454            LOGD("set focus mode %d", focusMode);
8455            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8456                rc = BAD_VALUE;
8457            }
8458        }
8459    }
8460
8461    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8462        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8463        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8464                focalDistance)) {
8465            rc = BAD_VALUE;
8466        }
8467    }
8468
8469    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8470        uint8_t fwk_antibandingMode =
8471                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8472        int val = lookupHalName(ANTIBANDING_MODES_MAP,
8473                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8474        if (NAME_NOT_FOUND != val) {
8475            uint32_t hal_antibandingMode = (uint32_t)val;
8476            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8477                    hal_antibandingMode)) {
8478                rc = BAD_VALUE;
8479            }
8480        }
8481    }
8482
8483    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8484        int32_t expCompensation = frame_settings.find(
8485                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8486        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8487            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8488        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8489            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8490        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8491                expCompensation)) {
8492            rc = BAD_VALUE;
8493        }
8494    }
8495
8496    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8497        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8498        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8499            rc = BAD_VALUE;
8500        }
8501    }
8502    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8503        rc = setHalFpsRange(frame_settings, hal_metadata);
8504        if (rc != NO_ERROR) {
8505            LOGE("setHalFpsRange failed");
8506        }
8507    }
8508
8509    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8510        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8511        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8512            rc = BAD_VALUE;
8513        }
8514    }
8515
8516    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8517        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8518        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8519                fwk_effectMode);
8520        if (NAME_NOT_FOUND != val) {
8521            uint8_t effectMode = (uint8_t)val;
8522            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8523                rc = BAD_VALUE;
8524            }
8525        }
8526    }
8527
8528    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8529        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8530        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8531                colorCorrectMode)) {
8532            rc = BAD_VALUE;
8533        }
8534    }
8535
8536    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8537        cam_color_correct_gains_t colorCorrectGains;
8538        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
8539            colorCorrectGains.gains[i] =
8540                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8541        }
8542        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8543                colorCorrectGains)) {
8544            rc = BAD_VALUE;
8545        }
8546    }
8547
8548    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8549        cam_color_correct_matrix_t colorCorrectTransform;
8550        cam_rational_type_t transform_elem;
8551        size_t num = 0;
8552        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8553           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8554              transform_elem.numerator =
8555                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8556              transform_elem.denominator =
8557                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8558              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8559              num++;
8560           }
8561        }
8562        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8563                colorCorrectTransform)) {
8564            rc = BAD_VALUE;
8565        }
8566    }
8567
8568    cam_trigger_t aecTrigger;
8569    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8570    aecTrigger.trigger_id = -1;
8571    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8572        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8573        aecTrigger.trigger =
8574            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8575        aecTrigger.trigger_id =
8576            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8577        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8578                aecTrigger)) {
8579            rc = BAD_VALUE;
8580        }
8581        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
8582                aecTrigger.trigger, aecTrigger.trigger_id);
8583    }
8584
8585    /*af_trigger must come with a trigger id*/
8586    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8587        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8588        cam_trigger_t af_trigger;
8589        af_trigger.trigger =
8590            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8591        af_trigger.trigger_id =
8592            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8593        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8594            rc = BAD_VALUE;
8595        }
8596        LOGD("AfTrigger: %d AfTriggerID: %d",
8597                af_trigger.trigger, af_trigger.trigger_id);
8598    }
8599
8600    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8601        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8602        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8603            rc = BAD_VALUE;
8604        }
8605    }
8606    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8607        cam_edge_application_t edge_application;
8608        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8609        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8610            edge_application.sharpness = 0;
8611        } else {
8612            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8613        }
8614        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8615            rc = BAD_VALUE;
8616        }
8617    }
8618
8619    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8620        int32_t respectFlashMode = 1;
8621        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8622            uint8_t fwk_aeMode =
8623                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8624            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8625                respectFlashMode = 0;
8626                LOGH("AE Mode controls flash, ignore android.flash.mode");
8627            }
8628        }
8629        if (respectFlashMode) {
8630            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8631                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8632            LOGH("flash mode after mapping %d", val);
8633            // To check: CAM_INTF_META_FLASH_MODE usage
8634            if (NAME_NOT_FOUND != val) {
8635                uint8_t flashMode = (uint8_t)val;
8636                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8637                    rc = BAD_VALUE;
8638                }
8639            }
8640        }
8641    }
8642
8643    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8644        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8645        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8646            rc = BAD_VALUE;
8647        }
8648    }
8649
8650    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8651        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8652        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8653                flashFiringTime)) {
8654            rc = BAD_VALUE;
8655        }
8656    }
8657
8658    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8659        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8660        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8661                hotPixelMode)) {
8662            rc = BAD_VALUE;
8663        }
8664    }
8665
8666    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8667        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8668        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8669                lensAperture)) {
8670            rc = BAD_VALUE;
8671        }
8672    }
8673
8674    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8675        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8676        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8677                filterDensity)) {
8678            rc = BAD_VALUE;
8679        }
8680    }
8681
8682    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8683        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8684        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8685                focalLength)) {
8686            rc = BAD_VALUE;
8687        }
8688    }
8689
8690    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8691        uint8_t optStabMode =
8692                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8693        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8694                optStabMode)) {
8695            rc = BAD_VALUE;
8696        }
8697    }
8698
8699    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8700        uint8_t videoStabMode =
8701                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8702        LOGD("videoStabMode from APP = %d", videoStabMode);
8703        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8704                videoStabMode)) {
8705            rc = BAD_VALUE;
8706        }
8707    }
8708
8709
8710    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8711        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8712        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8713                noiseRedMode)) {
8714            rc = BAD_VALUE;
8715        }
8716    }
8717
8718    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8719        float reprocessEffectiveExposureFactor =
8720            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8721        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8722                reprocessEffectiveExposureFactor)) {
8723            rc = BAD_VALUE;
8724        }
8725    }
8726
8727    cam_crop_region_t scalerCropRegion;
8728    bool scalerCropSet = false;
8729    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8730        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8731        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8732        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8733        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8734
8735        // Map coordinate system from active array to sensor output.
8736        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8737                scalerCropRegion.width, scalerCropRegion.height);
8738
8739        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8740                scalerCropRegion)) {
8741            rc = BAD_VALUE;
8742        }
8743        scalerCropSet = true;
8744    }
8745
8746    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8747        int64_t sensorExpTime =
8748                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8749        LOGD("setting sensorExpTime %lld", sensorExpTime);
8750        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8751                sensorExpTime)) {
8752            rc = BAD_VALUE;
8753        }
8754    }
8755
8756    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8757        int64_t sensorFrameDuration =
8758                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8759        int64_t minFrameDuration = getMinFrameDuration(request);
8760        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8761        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8762            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8763        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
8764        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8765                sensorFrameDuration)) {
8766            rc = BAD_VALUE;
8767        }
8768    }
8769
8770    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8771        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8772        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8773                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8774        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8775                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8776        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
8777        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8778                sensorSensitivity)) {
8779            rc = BAD_VALUE;
8780        }
8781    }
8782
8783    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
8784        int32_t ispSensitivity =
8785            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
8786        if (ispSensitivity <
8787            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
8788                ispSensitivity =
8789                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8790                LOGD("clamp ispSensitivity to %d", ispSensitivity);
8791        }
8792        if (ispSensitivity >
8793            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
8794                ispSensitivity =
8795                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
8796                LOGD("clamp ispSensitivity to %d", ispSensitivity);
8797        }
8798        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
8799                ispSensitivity)) {
8800            rc = BAD_VALUE;
8801        }
8802    }
8803
8804    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8805        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8806        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8807            rc = BAD_VALUE;
8808        }
8809    }
8810
8811    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8812        uint8_t fwk_facedetectMode =
8813                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8814
8815        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8816                fwk_facedetectMode);
8817
8818        if (NAME_NOT_FOUND != val) {
8819            uint8_t facedetectMode = (uint8_t)val;
8820            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8821                    facedetectMode)) {
8822                rc = BAD_VALUE;
8823            }
8824        }
8825    }
8826
8827    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8828        uint8_t histogramMode =
8829                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8830        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8831                histogramMode)) {
8832            rc = BAD_VALUE;
8833        }
8834    }
8835
8836    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8837        uint8_t sharpnessMapMode =
8838                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8839        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8840                sharpnessMapMode)) {
8841            rc = BAD_VALUE;
8842        }
8843    }
8844
8845    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8846        uint8_t tonemapMode =
8847                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8848        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8849            rc = BAD_VALUE;
8850        }
8851    }
8852    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8853    /*All tonemap channels will have the same number of points*/
8854    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8855        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8856        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8857        cam_rgb_tonemap_curves tonemapCurves;
8858        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8859        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8860            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
8861                     tonemapCurves.tonemap_points_cnt,
8862                    CAM_MAX_TONEMAP_CURVE_SIZE);
8863            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8864        }
8865
8866        /* ch0 = G*/
8867        size_t point = 0;
8868        cam_tonemap_curve_t tonemapCurveGreen;
8869        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8870            for (size_t j = 0; j < 2; j++) {
8871               tonemapCurveGreen.tonemap_points[i][j] =
8872                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8873               point++;
8874            }
8875        }
8876        tonemapCurves.curves[0] = tonemapCurveGreen;
8877
8878        /* ch 1 = B */
8879        point = 0;
8880        cam_tonemap_curve_t tonemapCurveBlue;
8881        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8882            for (size_t j = 0; j < 2; j++) {
8883               tonemapCurveBlue.tonemap_points[i][j] =
8884                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8885               point++;
8886            }
8887        }
8888        tonemapCurves.curves[1] = tonemapCurveBlue;
8889
8890        /* ch 2 = R */
8891        point = 0;
8892        cam_tonemap_curve_t tonemapCurveRed;
8893        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8894            for (size_t j = 0; j < 2; j++) {
8895               tonemapCurveRed.tonemap_points[i][j] =
8896                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8897               point++;
8898            }
8899        }
8900        tonemapCurves.curves[2] = tonemapCurveRed;
8901
8902        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8903                tonemapCurves)) {
8904            rc = BAD_VALUE;
8905        }
8906    }
8907
8908    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8909        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8910        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8911                captureIntent)) {
8912            rc = BAD_VALUE;
8913        }
8914    }
8915
8916    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8917        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8918        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8919                blackLevelLock)) {
8920            rc = BAD_VALUE;
8921        }
8922    }
8923
8924    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8925        uint8_t lensShadingMapMode =
8926                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8927        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8928                lensShadingMapMode)) {
8929            rc = BAD_VALUE;
8930        }
8931    }
8932
8933    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8934        cam_area_t roi;
8935        bool reset = true;
8936        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8937
8938        // Map coordinate system from active array to sensor output.
8939        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8940                roi.rect.height);
8941
8942        if (scalerCropSet) {
8943            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8944        }
8945        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8946            rc = BAD_VALUE;
8947        }
8948    }
8949
8950    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8951        cam_area_t roi;
8952        bool reset = true;
8953        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8954
8955        // Map coordinate system from active array to sensor output.
8956        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8957                roi.rect.height);
8958
8959        if (scalerCropSet) {
8960            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8961        }
8962        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8963            rc = BAD_VALUE;
8964        }
8965    }
8966
8967    // CDS for non-HFR non-video mode
8968    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8969            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
8970        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8971        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8972            LOGE("Invalid CDS mode %d!", *fwk_cds);
8973        } else {
8974            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8975                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8976                rc = BAD_VALUE;
8977            }
8978        }
8979    }
8980
8981    // TNR
8982    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8983        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8984        uint8_t b_TnrRequested = 0;
8985        cam_denoise_param_t tnr;
8986        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8987        tnr.process_plates =
8988            (cam_denoise_process_type_t)frame_settings.find(
8989            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8990        b_TnrRequested = tnr.denoise_enable;
8991        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8992            rc = BAD_VALUE;
8993        }
8994    }
8995
8996    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8997        int32_t fwk_testPatternMode =
8998                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8999        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9000                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9001
9002        if (NAME_NOT_FOUND != testPatternMode) {
9003            cam_test_pattern_data_t testPatternData;
9004            memset(&testPatternData, 0, sizeof(testPatternData));
9005            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9006            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9007                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9008                int32_t *fwk_testPatternData =
9009                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9010                testPatternData.r = fwk_testPatternData[0];
9011                testPatternData.b = fwk_testPatternData[3];
9012                switch (gCamCapability[mCameraId]->color_arrangement) {
9013                    case CAM_FILTER_ARRANGEMENT_RGGB:
9014                    case CAM_FILTER_ARRANGEMENT_GRBG:
9015                        testPatternData.gr = fwk_testPatternData[1];
9016                        testPatternData.gb = fwk_testPatternData[2];
9017                        break;
9018                    case CAM_FILTER_ARRANGEMENT_GBRG:
9019                    case CAM_FILTER_ARRANGEMENT_BGGR:
9020                        testPatternData.gr = fwk_testPatternData[2];
9021                        testPatternData.gb = fwk_testPatternData[1];
9022                        break;
9023                    default:
9024                        LOGE("color arrangement %d is not supported",
9025                                gCamCapability[mCameraId]->color_arrangement);
9026                        break;
9027                }
9028            }
9029            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9030                    testPatternData)) {
9031                rc = BAD_VALUE;
9032            }
9033        } else {
9034            LOGE("Invalid framework sensor test pattern mode %d",
9035                    fwk_testPatternMode);
9036        }
9037    }
9038
9039    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9040        size_t count = 0;
9041        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9042        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9043                gps_coords.data.d, gps_coords.count, count);
9044        if (gps_coords.count != count) {
9045            rc = BAD_VALUE;
9046        }
9047    }
9048
9049    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9050        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9051        size_t count = 0;
9052        const char *gps_methods_src = (const char *)
9053                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9054        memset(gps_methods, '\0', sizeof(gps_methods));
9055        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9056        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9057                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9058        if (GPS_PROCESSING_METHOD_SIZE != count) {
9059            rc = BAD_VALUE;
9060        }
9061    }
9062
9063    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9064        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9065        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9066                gps_timestamp)) {
9067            rc = BAD_VALUE;
9068        }
9069    }
9070
9071    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9072        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9073        cam_rotation_info_t rotation_info;
9074        if (orientation == 0) {
9075           rotation_info.rotation = ROTATE_0;
9076        } else if (orientation == 90) {
9077           rotation_info.rotation = ROTATE_90;
9078        } else if (orientation == 180) {
9079           rotation_info.rotation = ROTATE_180;
9080        } else if (orientation == 270) {
9081           rotation_info.rotation = ROTATE_270;
9082        }
9083        rotation_info.streamId = snapshotStreamId;
9084        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9085        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9086            rc = BAD_VALUE;
9087        }
9088    }
9089
9090    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9091        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9092        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9093            rc = BAD_VALUE;
9094        }
9095    }
9096
9097    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9098        uint32_t thumb_quality = (uint32_t)
9099                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9100        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9101                thumb_quality)) {
9102            rc = BAD_VALUE;
9103        }
9104    }
9105
9106    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9107        cam_dimension_t dim;
9108        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9109        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9110        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9111            rc = BAD_VALUE;
9112        }
9113    }
9114
9115    // Internal metadata
9116    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9117        size_t count = 0;
9118        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9119        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9120                privatedata.data.i32, privatedata.count, count);
9121        if (privatedata.count != count) {
9122            rc = BAD_VALUE;
9123        }
9124    }
9125
9126    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
9127        uint8_t* use_av_timer =
9128                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
9129        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
9130            rc = BAD_VALUE;
9131        }
9132    }
9133
9134    // EV step
9135    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9136            gCamCapability[mCameraId]->exp_compensation_step)) {
9137        rc = BAD_VALUE;
9138    }
9139
9140    // CDS info
9141    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9142        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9143                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9144
9145        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9146                CAM_INTF_META_CDS_DATA, *cdsData)) {
9147            rc = BAD_VALUE;
9148        }
9149    }
9150
9151    return rc;
9152}
9153
9154/*===========================================================================
9155 * FUNCTION   : captureResultCb
9156 *
9157 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9158 *
9159 * PARAMETERS :
9160 *   @frame  : frame information from mm-camera-interface
9161 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9162 *   @userdata: userdata
9163 *
9164 * RETURN     : NONE
9165 *==========================================================================*/
9166void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9167                camera3_stream_buffer_t *buffer,
9168                uint32_t frame_number, bool isInputBuffer, void *userdata)
9169{
9170    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9171    if (hw == NULL) {
9172        LOGE("Invalid hw %p", hw);
9173        return;
9174    }
9175
9176    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9177    return;
9178}
9179
9180
9181/*===========================================================================
9182 * FUNCTION   : initialize
9183 *
9184 * DESCRIPTION: Pass framework callback pointers to HAL
9185 *
9186 * PARAMETERS :
9187 *
9188 *
9189 * RETURN     : Success : 0
9190 *              Failure: -ENODEV
9191 *==========================================================================*/
9192
9193int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9194                                  const camera3_callback_ops_t *callback_ops)
9195{
9196    LOGD("E");
9197    QCamera3HardwareInterface *hw =
9198        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9199    if (!hw) {
9200        LOGE("NULL camera device");
9201        return -ENODEV;
9202    }
9203
9204    int rc = hw->initialize(callback_ops);
9205    LOGD("X");
9206    return rc;
9207}
9208
9209/*===========================================================================
9210 * FUNCTION   : configure_streams
9211 *
9212 * DESCRIPTION:
9213 *
9214 * PARAMETERS :
9215 *
9216 *
9217 * RETURN     : Success: 0
9218 *              Failure: -EINVAL (if stream configuration is invalid)
9219 *                       -ENODEV (fatal error)
9220 *==========================================================================*/
9221
9222int QCamera3HardwareInterface::configure_streams(
9223        const struct camera3_device *device,
9224        camera3_stream_configuration_t *stream_list)
9225{
9226    LOGD("E");
9227    QCamera3HardwareInterface *hw =
9228        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9229    if (!hw) {
9230        LOGE("NULL camera device");
9231        return -ENODEV;
9232    }
9233    int rc = hw->configureStreams(stream_list);
9234    LOGD("X");
9235    return rc;
9236}
9237
9238/*===========================================================================
9239 * FUNCTION   : construct_default_request_settings
9240 *
9241 * DESCRIPTION: Configure a settings buffer to meet the required use case
9242 *
9243 * PARAMETERS :
9244 *
9245 *
9246 * RETURN     : Success: Return valid metadata
9247 *              Failure: Return NULL
9248 *==========================================================================*/
9249const camera_metadata_t* QCamera3HardwareInterface::
9250    construct_default_request_settings(const struct camera3_device *device,
9251                                        int type)
9252{
9253
9254    LOGD("E");
9255    camera_metadata_t* fwk_metadata = NULL;
9256    QCamera3HardwareInterface *hw =
9257        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9258    if (!hw) {
9259        LOGE("NULL camera device");
9260        return NULL;
9261    }
9262
9263    fwk_metadata = hw->translateCapabilityToMetadata(type);
9264
9265    LOGD("X");
9266    return fwk_metadata;
9267}
9268
9269/*===========================================================================
9270 * FUNCTION   : process_capture_request
9271 *
9272 * DESCRIPTION:
9273 *
9274 * PARAMETERS :
9275 *
9276 *
9277 * RETURN     :
9278 *==========================================================================*/
9279int QCamera3HardwareInterface::process_capture_request(
9280                    const struct camera3_device *device,
9281                    camera3_capture_request_t *request)
9282{
9283    LOGD("E");
9284    QCamera3HardwareInterface *hw =
9285        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9286    if (!hw) {
9287        LOGE("NULL camera device");
9288        return -EINVAL;
9289    }
9290
9291    int rc = hw->processCaptureRequest(request);
9292    LOGD("X");
9293    return rc;
9294}
9295
9296/*===========================================================================
9297 * FUNCTION   : dump
9298 *
9299 * DESCRIPTION:
9300 *
9301 * PARAMETERS :
9302 *
9303 *
9304 * RETURN     :
9305 *==========================================================================*/
9306
9307void QCamera3HardwareInterface::dump(
9308                const struct camera3_device *device, int fd)
9309{
9310    /* Log level property is read when "adb shell dumpsys media.camera" is
9311       called so that the log level can be controlled without restarting
9312       the media server */
9313    getLogLevel();
9314
9315    LOGD("E");
9316    QCamera3HardwareInterface *hw =
9317        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9318    if (!hw) {
9319        LOGE("NULL camera device");
9320        return;
9321    }
9322
9323    hw->dump(fd);
9324    LOGD("X");
9325    return;
9326}
9327
9328/*===========================================================================
9329 * FUNCTION   : flush
9330 *
9331 * DESCRIPTION:
9332 *
9333 * PARAMETERS :
9334 *
9335 *
9336 * RETURN     :
9337 *==========================================================================*/
9338
9339int QCamera3HardwareInterface::flush(
9340                const struct camera3_device *device)
9341{
9342    int rc;
9343    LOGD("E");
9344    QCamera3HardwareInterface *hw =
9345        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9346    if (!hw) {
9347        LOGE("NULL camera device");
9348        return -EINVAL;
9349    }
9350
9351    pthread_mutex_lock(&hw->mMutex);
9352    // Validate current state
9353    switch (hw->mState) {
9354        case STARTED:
9355            /* valid state */
9356            break;
9357
9358        case ERROR:
9359            pthread_mutex_unlock(&hw->mMutex);
9360            hw->handleCameraDeviceError();
9361            return -ENODEV;
9362
9363        default:
9364            LOGI("Flush returned during state %d", hw->mState);
9365            pthread_mutex_unlock(&hw->mMutex);
9366            return 0;
9367    }
9368    pthread_mutex_unlock(&hw->mMutex);
9369
9370    rc = hw->flush(true /* restart channels */ );
9371    LOGD("X");
9372    return rc;
9373}
9374
9375/*===========================================================================
9376 * FUNCTION   : close_camera_device
9377 *
9378 * DESCRIPTION:
9379 *
9380 * PARAMETERS :
9381 *
9382 *
9383 * RETURN     :
9384 *==========================================================================*/
9385int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9386{
9387    int ret = NO_ERROR;
9388    QCamera3HardwareInterface *hw =
9389        reinterpret_cast<QCamera3HardwareInterface *>(
9390            reinterpret_cast<camera3_device_t *>(device)->priv);
9391    if (!hw) {
9392        LOGE("NULL camera device");
9393        return BAD_VALUE;
9394    }
9395
9396    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
9397    delete hw;
9398    LOGI("[KPI Perf]: X");
9399    return ret;
9400}
9401
9402/*===========================================================================
9403 * FUNCTION   : getWaveletDenoiseProcessPlate
9404 *
9405 * DESCRIPTION: query wavelet denoise process plate
9406 *
9407 * PARAMETERS : None
9408 *
9409 * RETURN     : WNR prcocess plate value
9410 *==========================================================================*/
9411cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9412{
9413    char prop[PROPERTY_VALUE_MAX];
9414    memset(prop, 0, sizeof(prop));
9415    property_get("persist.denoise.process.plates", prop, "0");
9416    int processPlate = atoi(prop);
9417    switch(processPlate) {
9418    case 0:
9419        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9420    case 1:
9421        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9422    case 2:
9423        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9424    case 3:
9425        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9426    default:
9427        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9428    }
9429}
9430
9431
9432/*===========================================================================
9433 * FUNCTION   : getTemporalDenoiseProcessPlate
9434 *
9435 * DESCRIPTION: query temporal denoise process plate
9436 *
9437 * PARAMETERS : None
9438 *
9439 * RETURN     : TNR prcocess plate value
9440 *==========================================================================*/
9441cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9442{
9443    char prop[PROPERTY_VALUE_MAX];
9444    memset(prop, 0, sizeof(prop));
9445    property_get("persist.tnr.process.plates", prop, "0");
9446    int processPlate = atoi(prop);
9447    switch(processPlate) {
9448    case 0:
9449        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9450    case 1:
9451        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9452    case 2:
9453        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9454    case 3:
9455        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9456    default:
9457        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9458    }
9459}
9460
9461
9462/*===========================================================================
9463 * FUNCTION   : extractSceneMode
9464 *
9465 * DESCRIPTION: Extract scene mode from frameworks set metadata
9466 *
9467 * PARAMETERS :
9468 *      @frame_settings: CameraMetadata reference
9469 *      @metaMode: ANDROID_CONTORL_MODE
9470 *      @hal_metadata: hal metadata structure
9471 *
9472 * RETURN     : None
9473 *==========================================================================*/
9474int32_t QCamera3HardwareInterface::extractSceneMode(
9475        const CameraMetadata &frame_settings, uint8_t metaMode,
9476        metadata_buffer_t *hal_metadata)
9477{
9478    int32_t rc = NO_ERROR;
9479
9480    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9481        camera_metadata_ro_entry entry =
9482                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9483        if (0 == entry.count)
9484            return rc;
9485
9486        uint8_t fwk_sceneMode = entry.data.u8[0];
9487
9488        int val = lookupHalName(SCENE_MODES_MAP,
9489                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9490                fwk_sceneMode);
9491        if (NAME_NOT_FOUND != val) {
9492            uint8_t sceneMode = (uint8_t)val;
9493            LOGD("sceneMode: %d", sceneMode);
9494            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9495                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9496                rc = BAD_VALUE;
9497            }
9498        }
9499    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9500            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9501        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9502        LOGD("sceneMode: %d", sceneMode);
9503        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9504                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9505            rc = BAD_VALUE;
9506        }
9507    }
9508    return rc;
9509}
9510
9511/*===========================================================================
9512 * FUNCTION   : needRotationReprocess
9513 *
9514 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9515 *
9516 * PARAMETERS : none
9517 *
9518 * RETURN     : true: needed
9519 *              false: no need
9520 *==========================================================================*/
9521bool QCamera3HardwareInterface::needRotationReprocess()
9522{
9523    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9524        // current rotation is not zero, and pp has the capability to process rotation
9525        LOGH("need do reprocess for rotation");
9526        return true;
9527    }
9528
9529    return false;
9530}
9531
9532/*===========================================================================
9533 * FUNCTION   : needReprocess
9534 *
9535 * DESCRIPTION: if reprocess in needed
9536 *
9537 * PARAMETERS : none
9538 *
9539 * RETURN     : true: needed
9540 *              false: no need
9541 *==========================================================================*/
9542bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9543{
9544    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
9545        // TODO: add for ZSL HDR later
9546        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9547        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9548            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
9549            return true;
9550        } else {
9551            LOGH("already post processed frame");
9552            return false;
9553        }
9554    }
9555    return needRotationReprocess();
9556}
9557
9558/*===========================================================================
9559 * FUNCTION   : needJpegRotation
9560 *
9561 * DESCRIPTION: if rotation from jpeg is needed
9562 *
9563 * PARAMETERS : none
9564 *
9565 * RETURN     : true: needed
9566 *              false: no need
9567 *==========================================================================*/
9568bool QCamera3HardwareInterface::needJpegRotation()
9569{
9570   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9571    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9572       LOGD("Need Jpeg to do the rotation");
9573       return true;
9574    }
9575    return false;
9576}
9577
9578/*===========================================================================
9579 * FUNCTION   : addOfflineReprocChannel
9580 *
9581 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9582 *              coming from input channel
9583 *
9584 * PARAMETERS :
9585 *   @config  : reprocess configuration
9586 *   @inputChHandle : pointer to the input (source) channel
9587 *
9588 *
9589 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
9590 *==========================================================================*/
9591QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9592        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9593{
9594    int32_t rc = NO_ERROR;
9595    QCamera3ReprocessChannel *pChannel = NULL;
9596
9597    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9598            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9599            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9600    if (NULL == pChannel) {
9601        LOGE("no mem for reprocess channel");
9602        return NULL;
9603    }
9604
9605    rc = pChannel->initialize(IS_TYPE_NONE);
9606    if (rc != NO_ERROR) {
9607        LOGE("init reprocess channel failed, ret = %d", rc);
9608        delete pChannel;
9609        return NULL;
9610    }
9611
9612    // pp feature config
9613    cam_pp_feature_config_t pp_config;
9614    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9615
9616    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9617    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
9618            & CAM_QCOM_FEATURE_DSDN) {
9619        //Use CPP CDS incase h/w supports it.
9620        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
9621        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
9622    }
9623
9624    rc = pChannel->addReprocStreamsFromSource(pp_config,
9625            config,
9626            IS_TYPE_NONE,
9627            mMetadataChannel);
9628
9629    if (rc != NO_ERROR) {
9630        delete pChannel;
9631        return NULL;
9632    }
9633    return pChannel;
9634}
9635
9636/*===========================================================================
9637 * FUNCTION   : getMobicatMask
9638 *
9639 * DESCRIPTION: returns mobicat mask
9640 *
9641 * PARAMETERS : none
9642 *
9643 * RETURN     : mobicat mask
9644 *
9645 *==========================================================================*/
9646uint8_t QCamera3HardwareInterface::getMobicatMask()
9647{
9648    return m_MobicatMask;
9649}
9650
9651/*===========================================================================
9652 * FUNCTION   : setMobicat
9653 *
9654 * DESCRIPTION: set Mobicat on/off.
9655 *
9656 * PARAMETERS :
9657 *   @params  : none
9658 *
9659 * RETURN     : int32_t type of status
9660 *              NO_ERROR  -- success
9661 *              none-zero failure code
9662 *==========================================================================*/
9663int32_t QCamera3HardwareInterface::setMobicat()
9664{
9665    char value [PROPERTY_VALUE_MAX];
9666    property_get("persist.camera.mobicat", value, "0");
9667    int32_t ret = NO_ERROR;
9668    uint8_t enableMobi = (uint8_t)atoi(value);
9669
9670    if (enableMobi) {
9671        tune_cmd_t tune_cmd;
9672        tune_cmd.type = SET_RELOAD_CHROMATIX;
9673        tune_cmd.module = MODULE_ALL;
9674        tune_cmd.value = TRUE;
9675        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9676                CAM_INTF_PARM_SET_VFE_COMMAND,
9677                tune_cmd);
9678
9679        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9680                CAM_INTF_PARM_SET_PP_COMMAND,
9681                tune_cmd);
9682    }
9683    m_MobicatMask = enableMobi;
9684
9685    return ret;
9686}
9687
9688/*===========================================================================
9689* FUNCTION   : getLogLevel
9690*
9691* DESCRIPTION: Reads the log level property into a variable
9692*
9693* PARAMETERS :
9694*   None
9695*
9696* RETURN     :
9697*   None
9698*==========================================================================*/
9699void QCamera3HardwareInterface::getLogLevel()
9700{
9701    char prop[PROPERTY_VALUE_MAX];
9702    uint32_t globalLogLevel = 0;
9703
9704    property_get("persist.camera.hal.debug", prop, "0");
9705    int val = atoi(prop);
9706    if (0 <= val) {
9707        gCamHal3LogLevel = (uint32_t)val;
9708    }
9709
9710    property_get("persist.camera.kpi.debug", prop, "1");
9711    gKpiDebugLevel = atoi(prop);
9712
9713    property_get("persist.camera.global.debug", prop, "0");
9714    val = atoi(prop);
9715    if (0 <= val) {
9716        globalLogLevel = (uint32_t)val;
9717    }
9718
9719    /* Highest log level among hal.logs and global.logs is selected */
9720    if (gCamHal3LogLevel < globalLogLevel)
9721        gCamHal3LogLevel = globalLogLevel;
9722
9723    return;
9724}
9725
9726/*===========================================================================
9727 * FUNCTION   : validateStreamRotations
9728 *
9729 * DESCRIPTION: Check if the rotations requested are supported
9730 *
9731 * PARAMETERS :
9732 *   @stream_list : streams to be configured
9733 *
9734 * RETURN     : NO_ERROR on success
9735 *              -EINVAL on failure
9736 *
9737 *==========================================================================*/
9738int QCamera3HardwareInterface::validateStreamRotations(
9739        camera3_stream_configuration_t *streamList)
9740{
9741    int rc = NO_ERROR;
9742
9743    /*
9744    * Loop through all streams requested in configuration
9745    * Check if unsupported rotations have been requested on any of them
9746    */
9747    for (size_t j = 0; j < streamList->num_streams; j++){
9748        camera3_stream_t *newStream = streamList->streams[j];
9749
9750        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9751        bool isImplDef = (newStream->format ==
9752                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9753        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9754                isImplDef);
9755
9756        if (isRotated && (!isImplDef || isZsl)) {
9757            LOGE("Error: Unsupported rotation of %d requested for stream"
9758                    "type:%d and stream format:%d",
9759                    newStream->rotation, newStream->stream_type,
9760                    newStream->format);
9761            rc = -EINVAL;
9762            break;
9763        }
9764    }
9765
9766    return rc;
9767}
9768
9769/*===========================================================================
9770* FUNCTION   : getFlashInfo
9771*
9772* DESCRIPTION: Retrieve information about whether the device has a flash.
9773*
9774* PARAMETERS :
9775*   @cameraId  : Camera id to query
9776*   @hasFlash  : Boolean indicating whether there is a flash device
9777*                associated with given camera
9778*   @flashNode : If a flash device exists, this will be its device node.
9779*
9780* RETURN     :
9781*   None
9782*==========================================================================*/
9783void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9784        bool& hasFlash,
9785        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9786{
9787    cam_capability_t* camCapability = gCamCapability[cameraId];
9788    if (NULL == camCapability) {
9789        hasFlash = false;
9790        flashNode[0] = '\0';
9791    } else {
9792        hasFlash = camCapability->flash_available;
9793        strlcpy(flashNode,
9794                (char*)camCapability->flash_dev_name,
9795                QCAMERA_MAX_FILEPATH_LENGTH);
9796    }
9797}
9798
9799/*===========================================================================
9800* FUNCTION   : getEepromVersionInfo
9801*
9802* DESCRIPTION: Retrieve version info of the sensor EEPROM data
9803*
9804* PARAMETERS : None
9805*
9806* RETURN     : string describing EEPROM version
9807*              "\0" if no such info available
9808*==========================================================================*/
9809const char *QCamera3HardwareInterface::getEepromVersionInfo()
9810{
9811    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
9812}
9813
9814/*===========================================================================
9815* FUNCTION   : getLdafCalib
9816*
9817* DESCRIPTION: Retrieve Laser AF calibration data
9818*
9819* PARAMETERS : None
9820*
9821* RETURN     : Two uint32_t describing laser AF calibration data
9822*              NULL if none is available.
9823*==========================================================================*/
9824const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9825{
9826    if (mLdafCalibExist) {
9827        return &mLdafCalib[0];
9828    } else {
9829        return NULL;
9830    }
9831}
9832
9833/*===========================================================================
9834 * FUNCTION   : dynamicUpdateMetaStreamInfo
9835 *
9836 * DESCRIPTION: This function:
9837 *             (1) stops all the channels
9838 *             (2) returns error on pending requests and buffers
9839 *             (3) sends metastream_info in setparams
9840 *             (4) starts all channels
9841 *             This is useful when sensor has to be restarted to apply any
9842 *             settings such as frame rate from a different sensor mode
9843 *
9844 * PARAMETERS : None
9845 *
9846 * RETURN     : NO_ERROR on success
9847 *              Error codes on failure
9848 *
9849 *==========================================================================*/
9850int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
9851{
9852    ATRACE_CALL();
9853    int rc = NO_ERROR;
9854
9855    LOGD("E");
9856
9857    rc = stopAllChannels();
9858    if (rc < 0) {
9859        LOGE("stopAllChannels failed");
9860        return rc;
9861    }
9862
9863    rc = notifyErrorForPendingRequests();
9864    if (rc < 0) {
9865        LOGE("notifyErrorForPendingRequests failed");
9866        return rc;
9867    }
9868
9869    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
9870        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
9871                "Format:%d",
9872                mStreamConfigInfo.type[i],
9873                mStreamConfigInfo.stream_sizes[i].width,
9874                mStreamConfigInfo.stream_sizes[i].height,
9875                mStreamConfigInfo.postprocess_mask[i],
9876                mStreamConfigInfo.format[i]);
9877    }
9878
9879    /* Send meta stream info once again so that ISP can start */
9880    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9881            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
9882    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
9883            mParameters);
9884    if (rc < 0) {
9885        LOGE("set Metastreaminfo failed. Sensor mode does not change");
9886    }
9887
9888    rc = startAllChannels();
9889    if (rc < 0) {
9890        LOGE("startAllChannels failed");
9891        return rc;
9892    }
9893
9894    LOGD("X");
9895    return rc;
9896}
9897
9898/*===========================================================================
9899 * FUNCTION   : stopAllChannels
9900 *
9901 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9902 *
9903 * PARAMETERS : None
9904 *
9905 * RETURN     : NO_ERROR on success
9906 *              Error codes on failure
9907 *
9908 *==========================================================================*/
9909int32_t QCamera3HardwareInterface::stopAllChannels()
9910{
9911    int32_t rc = NO_ERROR;
9912
9913    // Stop the Streams/Channels
9914    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9915        it != mStreamInfo.end(); it++) {
9916        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9917        if (channel) {
9918            channel->stop();
9919        }
9920        (*it)->status = INVALID;
9921    }
9922
9923    if (mSupportChannel) {
9924        mSupportChannel->stop();
9925    }
9926    if (mAnalysisChannel) {
9927        mAnalysisChannel->stop();
9928    }
9929    if (mRawDumpChannel) {
9930        mRawDumpChannel->stop();
9931    }
9932    if (mMetadataChannel) {
9933        /* If content of mStreamInfo is not 0, there is metadata stream */
9934        mMetadataChannel->stop();
9935    }
9936
9937    LOGD("All channels stopped");
9938    return rc;
9939}
9940
9941/*===========================================================================
9942 * FUNCTION   : startAllChannels
9943 *
9944 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9945 *
9946 * PARAMETERS : None
9947 *
9948 * RETURN     : NO_ERROR on success
9949 *              Error codes on failure
9950 *
9951 *==========================================================================*/
9952int32_t QCamera3HardwareInterface::startAllChannels()
9953{
9954    int32_t rc = NO_ERROR;
9955
9956    LOGD("Start all channels ");
9957    // Start the Streams/Channels
9958    if (mMetadataChannel) {
9959        /* If content of mStreamInfo is not 0, there is metadata stream */
9960        rc = mMetadataChannel->start();
9961        if (rc < 0) {
9962            LOGE("META channel start failed");
9963            return rc;
9964        }
9965    }
9966    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9967        it != mStreamInfo.end(); it++) {
9968        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9969        if (channel) {
9970            rc = channel->start();
9971            if (rc < 0) {
9972                LOGE("channel start failed");
9973                return rc;
9974            }
9975        }
9976    }
9977    if (mAnalysisChannel) {
9978        mAnalysisChannel->start();
9979    }
9980    if (mSupportChannel) {
9981        rc = mSupportChannel->start();
9982        if (rc < 0) {
9983            LOGE("Support channel start failed");
9984            return rc;
9985        }
9986    }
9987    if (mRawDumpChannel) {
9988        rc = mRawDumpChannel->start();
9989        if (rc < 0) {
9990            LOGE("RAW dump channel start failed");
9991            return rc;
9992        }
9993    }
9994
9995    LOGD("All channels started");
9996    return rc;
9997}
9998
9999/*===========================================================================
10000 * FUNCTION   : notifyErrorForPendingRequests
10001 *
10002 * DESCRIPTION: This function sends error for all the pending requests/buffers
10003 *
10004 * PARAMETERS : None
10005 *
10006 * RETURN     : Error codes
10007 *              NO_ERROR on success
10008 *
10009 *==========================================================================*/
10010int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
10011{
10012    int32_t rc = NO_ERROR;
10013    unsigned int frameNum = 0;
10014    camera3_capture_result_t result;
10015    camera3_stream_buffer_t *pStream_Buf = NULL;
10016    FlushMap flushMap;
10017
10018    memset(&result, 0, sizeof(camera3_capture_result_t));
10019
10020    if (mPendingRequestsList.size() > 0) {
10021        pendingRequestIterator i = mPendingRequestsList.begin();
10022        frameNum = i->frame_number;
10023    } else {
10024        /* There might still be pending buffers even though there are
10025         no pending requests. Setting the frameNum to MAX so that
10026         all the buffers with smaller frame numbers are returned */
10027        frameNum = UINT_MAX;
10028    }
10029
10030    LOGH("Oldest frame num on  mPendingRequestsList = %d",
10031       frameNum);
10032
10033    // Go through the pending buffers and group them depending
10034    // on frame number
10035    for (List<PendingBufferInfo>::iterator k =
10036            mPendingBuffersMap.mPendingBufferList.begin();
10037            k != mPendingBuffersMap.mPendingBufferList.end();) {
10038
10039        if (k->frame_number < frameNum) {
10040            ssize_t idx = flushMap.indexOfKey(k->frame_number);
10041            if (idx == NAME_NOT_FOUND) {
10042                Vector<PendingBufferInfo> pending;
10043                pending.add(*k);
10044                flushMap.add(k->frame_number, pending);
10045            } else {
10046                Vector<PendingBufferInfo> &pending =
10047                        flushMap.editValueFor(k->frame_number);
10048                pending.add(*k);
10049            }
10050
10051            mPendingBuffersMap.num_buffers--;
10052            k = mPendingBuffersMap.mPendingBufferList.erase(k);
10053        } else {
10054            k++;
10055        }
10056    }
10057
10058    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
10059        uint32_t frame_number = flushMap.keyAt(iFlush);
10060        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
10061
10062        // Send Error notify to frameworks for each buffer for which
10063        // metadata buffer is already sent
10064        LOGH("Sending ERROR BUFFER for frame %d number of buffer %d",
10065           frame_number, pending.size());
10066
10067        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
10068        if (NULL == pStream_Buf) {
10069            LOGE("No memory for pending buffers array");
10070            return NO_MEMORY;
10071        }
10072        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
10073
10074        for (size_t j = 0; j < pending.size(); j++) {
10075            const PendingBufferInfo &info = pending.itemAt(j);
10076            camera3_notify_msg_t notify_msg;
10077            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10078            notify_msg.type = CAMERA3_MSG_ERROR;
10079            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
10080            notify_msg.message.error.error_stream = info.stream;
10081            notify_msg.message.error.frame_number = frame_number;
10082            pStream_Buf[j].acquire_fence = -1;
10083            pStream_Buf[j].release_fence = -1;
10084            pStream_Buf[j].buffer = info.buffer;
10085            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
10086            pStream_Buf[j].stream = info.stream;
10087            mCallbackOps->notify(mCallbackOps, &notify_msg);
10088            LOGH("notify frame_number = %d stream %p",
10089                    frame_number, info.stream);
10090        }
10091
10092        result.result = NULL;
10093        result.frame_number = frame_number;
10094        result.num_output_buffers = (uint32_t)pending.size();
10095        result.output_buffers = pStream_Buf;
10096        mCallbackOps->process_capture_result(mCallbackOps, &result);
10097
10098        delete [] pStream_Buf;
10099    }
10100
10101    LOGH("Sending ERROR REQUEST for all pending requests");
10102
10103    flushMap.clear();
10104    for (List<PendingBufferInfo>::iterator k =
10105            mPendingBuffersMap.mPendingBufferList.begin();
10106            k != mPendingBuffersMap.mPendingBufferList.end();) {
10107        ssize_t idx = flushMap.indexOfKey(k->frame_number);
10108        if (idx == NAME_NOT_FOUND) {
10109            Vector<PendingBufferInfo> pending;
10110            pending.add(*k);
10111            flushMap.add(k->frame_number, pending);
10112        } else {
10113            Vector<PendingBufferInfo> &pending =
10114                    flushMap.editValueFor(k->frame_number);
10115            pending.add(*k);
10116        }
10117
10118        mPendingBuffersMap.num_buffers--;
10119        k = mPendingBuffersMap.mPendingBufferList.erase(k);
10120    }
10121
10122    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
10123
10124    // Go through the pending requests info and send error request to framework
10125    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
10126        uint32_t frame_number = flushMap.keyAt(iFlush);
10127        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
10128        LOGH("Sending ERROR REQUEST for frame %d",
10129               frame_number);
10130
10131        // Send shutter notify to frameworks
10132        camera3_notify_msg_t notify_msg;
10133        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10134        notify_msg.type = CAMERA3_MSG_ERROR;
10135        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
10136        notify_msg.message.error.error_stream = NULL;
10137        notify_msg.message.error.frame_number = frame_number;
10138        mCallbackOps->notify(mCallbackOps, &notify_msg);
10139
10140        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
10141        if (NULL == pStream_Buf) {
10142            LOGE("No memory for pending buffers array");
10143            return NO_MEMORY;
10144        }
10145        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
10146
10147        for (size_t j = 0; j < pending.size(); j++) {
10148            const PendingBufferInfo &info = pending.itemAt(j);
10149            pStream_Buf[j].acquire_fence = -1;
10150            pStream_Buf[j].release_fence = -1;
10151            pStream_Buf[j].buffer = info.buffer;
10152            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
10153            pStream_Buf[j].stream = info.stream;
10154        }
10155
10156        result.input_buffer = i->input_buffer;
10157        result.num_output_buffers = (uint32_t)pending.size();
10158        result.output_buffers = pStream_Buf;
10159        result.result = NULL;
10160        result.frame_number = frame_number;
10161        mCallbackOps->process_capture_result(mCallbackOps, &result);
10162        delete [] pStream_Buf;
10163        i = erasePendingRequest(i);
10164    }
10165
10166    /* Reset pending frame Drop list and requests list */
10167    mPendingFrameDropList.clear();
10168
10169    flushMap.clear();
10170    mPendingBuffersMap.num_buffers = 0;
10171    mPendingBuffersMap.mPendingBufferList.clear();
10172    mPendingReprocessResultList.clear();
10173    LOGH("Cleared all the pending buffers ");
10174
10175    return rc;
10176}
10177
10178bool QCamera3HardwareInterface::isOnEncoder(
10179        const cam_dimension_t max_viewfinder_size,
10180        uint32_t width, uint32_t height)
10181{
10182    return (width > (uint32_t)max_viewfinder_size.width ||
10183            height > (uint32_t)max_viewfinder_size.height);
10184}
10185
10186/*===========================================================================
10187 * FUNCTION   : setBundleInfo
10188 *
10189 * DESCRIPTION: Set bundle info for all streams that are bundle.
10190 *
10191 * PARAMETERS : None
10192 *
10193 * RETURN     : NO_ERROR on success
10194 *              Error codes on failure
10195 *==========================================================================*/
10196int32_t QCamera3HardwareInterface::setBundleInfo()
10197{
10198    int32_t rc = NO_ERROR;
10199
10200    if (mChannelHandle) {
10201        cam_bundle_config_t bundleInfo;
10202        memset(&bundleInfo, 0, sizeof(bundleInfo));
10203        rc = mCameraHandle->ops->get_bundle_info(
10204                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10205        if (rc != NO_ERROR) {
10206            LOGE("get_bundle_info failed");
10207            return rc;
10208        }
10209        if (mAnalysisChannel) {
10210            mAnalysisChannel->setBundleInfo(bundleInfo);
10211        }
10212        if (mSupportChannel) {
10213            mSupportChannel->setBundleInfo(bundleInfo);
10214        }
10215        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10216                it != mStreamInfo.end(); it++) {
10217            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10218            channel->setBundleInfo(bundleInfo);
10219        }
10220        if (mRawDumpChannel) {
10221            mRawDumpChannel->setBundleInfo(bundleInfo);
10222        }
10223    }
10224
10225    return rc;
10226}
10227
10228}; //end namespace qcamera
10229