QCamera3HWI.cpp revision d59d4ae1e81b3b4f85c59b75636580830b3f0833
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
64#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
65
66#define EMPTY_PIPELINE_DELAY 2
67#define PARTIAL_RESULT_COUNT 2
68#define FRAME_SKIP_DELAY     0
69
70#define MAX_VALUE_8BIT ((1<<8)-1)
71#define MAX_VALUE_10BIT ((1<<10)-1)
72#define MAX_VALUE_12BIT ((1<<12)-1)
73
74#define VIDEO_4K_WIDTH  3840
75#define VIDEO_4K_HEIGHT 2160
76
77#define MAX_EIS_WIDTH 1920
78#define MAX_EIS_HEIGHT 1080
79
80#define MAX_RAW_STREAMS        1
81#define MAX_STALLING_STREAMS   1
82#define MAX_PROCESSED_STREAMS  3
83/* Batch mode is enabled only if FPS set is equal to or greater than this */
84#define MIN_FPS_FOR_BATCH_MODE (120)
85#define PREVIEW_FPS_FOR_HFR    (30)
86#define DEFAULT_VIDEO_FPS      (30.0)
87#define MAX_HFR_BATCH_SIZE     (8)
88#define REGIONS_TUPLE_COUNT    5
89#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
90#define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds
91// Set a threshold for detection of missing buffers //seconds
92#define MISSING_REQUEST_BUF_TIMEOUT 3
93#define FLUSH_TIMEOUT 3
94#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
95
96#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
97                                              CAM_QCOM_FEATURE_CROP |\
98                                              CAM_QCOM_FEATURE_ROTATION |\
99                                              CAM_QCOM_FEATURE_SHARPNESS |\
100                                              CAM_QCOM_FEATURE_SCALE |\
101                                              CAM_QCOM_FEATURE_CAC |\
102                                              CAM_QCOM_FEATURE_CDS )
103/* Per configuration size for static metadata length*/
104#define PER_CONFIGURATION_SIZE_3 (3)
105
106#define TIMEOUT_NEVER -1
107
// Per-camera capability table, indexed by camera id; filled in by the HAL
// module layer before any QCamera3HardwareInterface session is constructed
// (the constructor below dereferences its slot unconditionally).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata per camera id, built once and reused across sessions.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock guarding the global session bookkeeping below.
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity (see getLogLevel()).
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions; guarded by gCamLock. Used to tell
// the display HAL when the first session opens / last session closes.
extern uint8_t gNumCameraSessions;
113
// String-to-enum lookup for CDS modes — presumably consumed when parsing
// CDS-related system properties; confirm at the call sites.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Android control.effectMode values <-> HAL effect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android control.awbMode values <-> HAL white-balance modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android control.sceneMode values <-> HAL scene modes. Note STEADYPHOTO
// intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android control.afMode values <-> HAL focus modes. AF_MODE_OFF appears
// twice so that both HAL OFF and FIXED lenses translate to the framework's
// OFF — presumably the lookup helpers take the first match when mapping in a
// given direction; confirm against the translation helpers.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
180
// Android colorCorrection.aberrationMode values <-> HAL CAC modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android control.aeAntibandingMode values <-> HAL antibanding modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android control.aeMode values -> HAL flash behavior: both plain ON and OFF
// disable flash; the AUTO_FLASH/REDEYE variants select AUTO, ALWAYS_FLASH
// forces ON.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android flash.mode values <-> HAL flash modes (manual flash control path).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android statistics.faceDetectMode values <-> HAL face-detect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android lens.info.focusDistanceCalibration values <-> HAL calibration levels.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens.state values <-> HAL AF lens states.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
244
// Flattened (width, height) pairs advertised as the supported JPEG thumbnail
// sizes; the leading {0, 0} entry is the standard Android marker for
// "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Fallback HFR video sizes (largest first) — presumably used when the sensor
// capability table does not supply its own list; confirm at the call site.
const cam_dimension_t default_hfr_video_sizes[] = {
    { 3840, 2160 },
    { 1920, 1080 },
    { 1280,  720 },
    {  640,  480 },
    {  480,  320 }
};
260
261
// Android sensor.testPatternMode values <-> HAL test-pattern modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested frame rate (fps) <-> HAL HFR mode enum.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
310
// camera3_device_ops_t vtable handed to the camera framework (installed on
// mCameraDevice.ops in the constructor). register_stream_buffers and
// get_metadata_vendor_tag_ops are deprecated from HAL device API v3.2
// onward and must be NULL — this HAL reports CAMERA_DEVICE_API_VERSION_3_3.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
322
323/*===========================================================================
324 * FUNCTION   : QCamera3HardwareInterface
325 *
326 * DESCRIPTION: constructor of QCamera3HardwareInterface
327 *
328 * PARAMETERS :
329 *   @cameraId  : camera ID
330 *
331 * RETURN     : none
332 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Publish the camera3_device_t facade the framework talks through; it
    // reports HAL device API v3.3 and routes ops back to this instance via
    // the static mCameraOps table and the priv pointer.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // gCamCapability[cameraId] must already be populated by the module layer.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU's preferred surface stride alignment; fall back to the
    // default 32-pixel padding if the library or symbol is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
432
433/*===========================================================================
434 * FUNCTION   : ~QCamera3HardwareInterface
435 *
436 * DESCRIPTION: destructor of QCamera3HardwareInterface
437 *
438 * PARAMETERS : none
439 *
440 * RETURN     : none
441 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    // Phase 1: stop every channel (raw-dump, per-stream, support, analysis,
    // metadata) and then the backend channel bundle. Stop order precedes
    // deletion so no channel is torn down while frames are still flowing.
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Phase 2: delete the channel objects now that everything is stopped.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // Picture channel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    // Phase 3: if the session was ever configured, push a final empty
    // stream-info ("unconfigure") to the backend before releasing parameters.
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    // Phase 4: close the backend session unless it was never opened (or has
    // already been closed), then drop all pending bookkeeping.
    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    // Free any lazily-built default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
564
565/*===========================================================================
566 * FUNCTION   : erasePendingRequest
567 *
568 * DESCRIPTION: function to erase a desired pending request after freeing any
569 *              allocated memory
570 *
571 * PARAMETERS :
572 *   @i       : iterator pointing to pending request to be erased
573 *
574 * RETURN     : iterator pointing to the next request
575 *==========================================================================*/
576QCamera3HardwareInterface::pendingRequestIterator
577        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
578{
579    if (i->input_buffer != NULL) {
580        free(i->input_buffer);
581        i->input_buffer = NULL;
582    }
583    if (i->settings != NULL)
584        free_camera_metadata((camera_metadata_t*)i->settings);
585    return mPendingRequestsList.erase(i);
586}
587
588/*===========================================================================
589 * FUNCTION   : camEvtHandle
590 *
591 * DESCRIPTION: Function registered to mm-camera-interface to handle events
592 *
593 * PARAMETERS :
594 *   @camera_handle : interface layer camera handle
595 *   @evt           : ptr to event
596 *   @user_data     : user data ptr
597 *
598 * RETURN     : none
599 *==========================================================================*/
600void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
601                                          mm_camera_event_t *evt,
602                                          void *user_data)
603{
604    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
605    if (obj && evt) {
606        switch(evt->server_event_type) {
607            case CAM_EVENT_TYPE_DAEMON_DIED:
608                pthread_mutex_lock(&obj->mMutex);
609                obj->mState = ERROR;
610                pthread_mutex_unlock(&obj->mMutex);
611                LOGE("Fatal, camera daemon died");
612                break;
613
614            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
615                LOGD("HAL got request pull from Daemon");
616                pthread_mutex_lock(&obj->mMutex);
617                obj->mWokenUpByDaemon = true;
618                obj->unblockRequestIfNecessary();
619                pthread_mutex_unlock(&obj->mMutex);
620                break;
621
622            default:
623                LOGW("Warning: Unhandled event %d",
624                        evt->server_event_type);
625                break;
626        }
627    } else {
628        LOGE("NULL user_data/evt");
629    }
630}
631
632/*===========================================================================
633 * FUNCTION   : openCamera
634 *
635 * DESCRIPTION: open camera
636 *
637 * PARAMETERS :
638 *   @hw_device  : double ptr for camera device struct
639 *
640 * RETURN     : int32_t type of status
641 *              NO_ERROR  -- success
642 *              none-zero failure code
643 *==========================================================================*/
644int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
645{
646    int rc = 0;
647    if (mState != CLOSED) {
648        *hw_device = NULL;
649        return PERMISSION_DENIED;
650    }
651
652    m_perfLock.lock_acq();
653    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
654             mCameraId);
655
656    rc = openCamera();
657    if (rc == 0) {
658        *hw_device = &mCameraDevice.common;
659    } else
660        *hw_device = NULL;
661
662    m_perfLock.lock_rel();
663    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
664             mCameraId, rc);
665
666    if (rc == NO_ERROR) {
667        mState = OPENED;
668    }
669    return rc;
670}
671
672/*===========================================================================
673 * FUNCTION   : openCamera
674 *
675 * DESCRIPTION: open camera
676 *
677 * PARAMETERS : none
678 *
679 * RETURN     : int32_t type of status
680 *              NO_ERROR  -- success
681 *              none-zero failure code
682 *==========================================================================*/
683int QCamera3HardwareInterface::openCamera()
684{
685    int rc = 0;
686    char value[PROPERTY_VALUE_MAX];
687
688    KPI_ATRACE_CALL();
689    if (mCameraHandle) {
690        LOGE("Failure: Camera already opened");
691        return ALREADY_EXISTS;
692    }
693
694    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
695    if (rc < 0) {
696        LOGE("Failed to reserve flash for camera id: %d",
697                mCameraId);
698        return UNKNOWN_ERROR;
699    }
700
701    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
702    if (rc) {
703        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
704        return rc;
705    }
706
707    if (!mCameraHandle) {
708        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
709        return -ENODEV;
710    }
711
712    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
713            camEvtHandle, (void *)this);
714
715    if (rc < 0) {
716        LOGE("Error, failed to register event callback");
717        /* Not closing camera here since it is already handled in destructor */
718        return FAILED_TRANSACTION;
719    }
720
721    mExifParams.debug_params =
722            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
723    if (mExifParams.debug_params) {
724        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
725    } else {
726        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
727        return NO_MEMORY;
728    }
729    mFirstConfiguration = true;
730
731    //Notify display HAL that a camera session is active.
732    //But avoid calling the same during bootup because camera service might open/close
733    //cameras at boot time during its initialization and display service will also internally
734    //wait for camera service to initialize first while calling this display API, resulting in a
735    //deadlock situation. Since boot time camera open/close calls are made only to fetch
736    //capabilities, no need of this display bw optimization.
737    //Use "service.bootanim.exit" property to know boot status.
738    property_get("service.bootanim.exit", value, "0");
739    if (atoi(value) == 1) {
740        pthread_mutex_lock(&gCamLock);
741        if (gNumCameraSessions++ == 0) {
742            setCameraLaunchStatus(true);
743        }
744        pthread_mutex_unlock(&gCamLock);
745    }
746
747    return NO_ERROR;
748}
749
750/*===========================================================================
751 * FUNCTION   : closeCamera
752 *
753 * DESCRIPTION: close camera
754 *
755 * PARAMETERS : none
756 *
757 * RETURN     : int32_t type of status
758 *              NO_ERROR  -- success
759 *              none-zero failure code
760 *==========================================================================*/
761int QCamera3HardwareInterface::closeCamera()
762{
763    KPI_ATRACE_CALL();
764    int rc = NO_ERROR;
765    char value[PROPERTY_VALUE_MAX];
766
767    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
768             mCameraId);
769    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
770    mCameraHandle = NULL;
771
772    //Notify display HAL that there is no active camera session
773    //but avoid calling the same during bootup. Refer to openCamera
774    //for more details.
775    property_get("service.bootanim.exit", value, "0");
776    if (atoi(value) == 1) {
777        pthread_mutex_lock(&gCamLock);
778        if (--gNumCameraSessions == 0) {
779            setCameraLaunchStatus(false);
780        }
781        pthread_mutex_unlock(&gCamLock);
782    }
783
784    if (mExifParams.debug_params) {
785        free(mExifParams.debug_params);
786        mExifParams.debug_params = NULL;
787    }
788    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
789        LOGW("Failed to release flash for camera id: %d",
790                mCameraId);
791    }
792    mState = CLOSED;
793    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
794         mCameraId, rc);
795    return rc;
796}
797
798/*===========================================================================
799 * FUNCTION   : initialize
800 *
801 * DESCRIPTION: Initialize frameworks callback functions
802 *
803 * PARAMETERS :
804 *   @callback_ops : callback function to frameworks
805 *
806 * RETURN     :
807 *
808 *==========================================================================*/
809int QCamera3HardwareInterface::initialize(
810        const struct camera3_callback_ops *callback_ops)
811{
812    ATRACE_CALL();
813    int rc;
814
815    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
816    pthread_mutex_lock(&mMutex);
817
818    // Validate current state
819    switch (mState) {
820        case OPENED:
821            /* valid state */
822            break;
823
824        case ERROR:
825            pthread_mutex_unlock(&mMutex);
826            handleCameraDeviceError();
827            rc = -ENODEV;
828            goto err2;
829
830        default:
831            LOGE("Invalid state %d", mState);
832            rc = -ENODEV;
833            goto err1;
834    }
835
836    rc = initParameters();
837    if (rc < 0) {
838        LOGE("initParamters failed %d", rc);
839        goto err1;
840    }
841    mCallbackOps = callback_ops;
842
843    mChannelHandle = mCameraHandle->ops->add_channel(
844            mCameraHandle->camera_handle, NULL, NULL, this);
845    if (mChannelHandle == 0) {
846        LOGE("add_channel failed");
847        rc = -ENOMEM;
848        pthread_mutex_unlock(&mMutex);
849        return rc;
850    }
851
852    pthread_mutex_unlock(&mMutex);
853    mCameraInitialized = true;
854    mState = INITIALIZED;
855    LOGI("X");
856    return 0;
857
858err1:
859    pthread_mutex_unlock(&mMutex);
860err2:
861    return rc;
862}
863
864/*===========================================================================
865 * FUNCTION   : validateStreamDimensions
866 *
867 * DESCRIPTION: Check if the configuration requested are those advertised
868 *
869 * PARAMETERS :
870 *   @stream_list : streams to be configured
871 *
872 * RETURN     :
873 *
874 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270 rotation the buffer dimensions are swapped relative to
        // the sensor orientation, so validate the un-rotated (sensor-aligned)
        // width/height against the capability tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must exactly match one of the advertised raw dims.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL-type streams (bidirectional/input/ZSL usage) are accepted
            // at full active-array size; the break below exits the switch,
            // intentionally skipping the picture-size table scan.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Non-ZSL processed streams fall back to the picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
986
987/*==============================================================================
988 * FUNCTION   : isSupportChannelNeeded
989 *
990 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
991 *
992 * PARAMETERS :
993 *   @stream_list : streams to be configured
994 *   @stream_config_info : the config info for streams to be configured
995 *
 * RETURN     : Boolean true/false decision
997 *
998 *==========================================================================*/
999bool QCamera3HardwareInterface::isSupportChannelNeeded(
1000        camera3_stream_configuration_t *streamList,
1001        cam_stream_size_info_t stream_config_info)
1002{
1003    uint32_t i;
1004    bool pprocRequested = false;
1005    /* Check for conditions where PProc pipeline does not have any streams*/
1006    for (i = 0; i < stream_config_info.num_streams; i++) {
1007        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1008                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1009            pprocRequested = true;
1010            break;
1011        }
1012    }
1013
1014    if (pprocRequested == false )
1015        return true;
1016
1017    /* Dummy stream needed if only raw or jpeg streams present */
1018    for (i = 0; i < streamList->num_streams; i++) {
1019        switch(streamList->streams[i]->format) {
1020            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1021            case HAL_PIXEL_FORMAT_RAW10:
1022            case HAL_PIXEL_FORMAT_RAW16:
1023            case HAL_PIXEL_FORMAT_BLOB:
1024                break;
1025            default:
1026                return false;
1027        }
1028    }
1029    return true;
1030}
1031
1032/*==============================================================================
1033 * FUNCTION   : getSensorOutputSize
1034 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1036 *
1037 * PARAMETERS :
1038 *   @sensor_dim : sensor output dimension (output)
1039 *
1040 * RETURN     : int32_t type of status
1041 *              NO_ERROR  -- success
 *              non-zero failure code
1043 *
1044 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // Compute the bounding box over all configured stream sizes; the sensor
    // output must cover the largest requested width and height.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Reuse mParameters as a scratch batch: clear, set max dimension, then
    // clear again and query the resulting raw dimension. Order matters:
    // the set_parms must land before the get_parms below.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
        return rc;
    }

    // Output parameter: the backend-chosen sensor (raw) output dimension.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);

    return rc;
}
1087
1088/*==============================================================================
1089 * FUNCTION   : enablePowerHint
1090 *
1091 * DESCRIPTION: enable single powerhint for preview and different video modes.
1092 *
1093 * PARAMETERS :
1094 *
1095 * RETURN     : NULL
1096 *
1097 *==========================================================================*/
1098void QCamera3HardwareInterface::enablePowerHint()
1099{
1100    if (!mPowerHintEnabled) {
1101        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1102        mPowerHintEnabled = true;
1103    }
1104}
1105
1106/*==============================================================================
1107 * FUNCTION   : disablePowerHint
1108 *
1109 * DESCRIPTION: disable current powerhint.
1110 *
1111 * PARAMETERS :
1112 *
1113 * RETURN     : NULL
1114 *
1115 *==========================================================================*/
1116void QCamera3HardwareInterface::disablePowerHint()
1117{
1118    if (mPowerHintEnabled) {
1119        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1120        mPowerHintEnabled = false;
1121    }
1122}
1123
1124/*==============================================================================
1125 * FUNCTION   : addToPPFeatureMask
1126 *
1127 * DESCRIPTION: add additional features to pp feature mask based on
1128 *              stream type and usecase
1129 *
1130 * PARAMETERS :
1131 *   @stream_format : stream type for feature mask
1132 *   @stream_idx : stream idx within postprocess_mask list to change
1133 *
1134 * RETURN     : NULL
1135 *
1136 *==========================================================================*/
1137void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1138        uint32_t stream_idx)
1139{
1140    char feature_mask_value[PROPERTY_VALUE_MAX];
1141    uint32_t feature_mask;
1142    int args_converted;
1143    int property_len;
1144
1145    /* Get feature mask from property */
1146    property_len = property_get("persist.camera.hal3.feature",
1147            feature_mask_value, "0");
1148    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1149            (feature_mask_value[1] == 'x')) {
1150        args_converted = sscanf(feature_mask_value, "0x%x", &feature_mask);
1151    } else {
1152        args_converted = sscanf(feature_mask_value, "%d", &feature_mask);
1153    }
1154    if (1 != args_converted) {
1155        feature_mask = 0;
1156        LOGE("Wrong feature mask %s", feature_mask_value);
1157        return;
1158    }
1159
1160    switch (stream_format) {
1161    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1162        /* Add LLVD to pp feature mask only if video hint is enabled */
1163        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1164            mStreamConfigInfo.postprocess_mask[stream_idx]
1165                    |= CAM_QTI_FEATURE_SW_TNR;
1166            LOGH("Added SW TNR to pp feature mask");
1167        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1168            mStreamConfigInfo.postprocess_mask[stream_idx]
1169                    |= CAM_QCOM_FEATURE_LLVD;
1170            LOGH("Added LLVD SeeMore to pp feature mask");
1171        }
1172        break;
1173    }
1174    default:
1175        break;
1176    }
1177    LOGD("PP feature mask %x",
1178            mStreamConfigInfo.postprocess_mask[stream_idx]);
1179}
1180
1181/*==============================================================================
1182 * FUNCTION   : updateFpsInPreviewBuffer
1183 *
1184 * DESCRIPTION: update FPS information in preview buffer.
1185 *
1186 * PARAMETERS :
1187 *   @metadata    : pointer to metadata buffer
1188 *   @frame_number: frame_number to look for in pending buffer list
1189 *
1190 * RETURN     : None
1191 *
1192 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            // Only buffers of the matching frame number that belong to a
            // preview stream receive the refresh-rate update.
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                    CAM_INTF_PARM_FPS_RANGE, metadata) {
                    // max_fps (float) is narrowed to int32_t for the gralloc
                    // metadata call below.
                    int32_t cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                        (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
1219
1220/*===========================================================================
1221 * FUNCTION   : configureStreams
1222 *
1223 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1224 *              and output streams.
1225 *
1226 * PARAMETERS :
1227 *   @stream_list : streams to be configured
1228 *
1229 * RETURN     :
1230 *
1231 *==========================================================================*/
1232int QCamera3HardwareInterface::configureStreams(
1233        camera3_stream_configuration_t *streamList)
1234{
1235    ATRACE_CALL();
1236    int rc = 0;
1237
1238    // Acquire perfLock before configure streams
1239    m_perfLock.lock_acq();
1240    rc = configureStreamsPerfLocked(streamList);
1241    m_perfLock.lock_rel();
1242
1243    return rc;
1244}
1245
1246/*===========================================================================
1247 * FUNCTION   : configureStreamsPerfLocked
1248 *
1249 * DESCRIPTION: configureStreams while perfLock is held.
1250 *
1251 * PARAMETERS :
1252 *   @stream_list : streams to be configured
1253 *
1254 * RETURN     : int32_t type of status
1255 *              NO_ERROR  -- success
 *              non-zero failure code
1257 *==========================================================================*/
1258int QCamera3HardwareInterface::configureStreamsPerfLocked(
1259        camera3_stream_configuration_t *streamList)
1260{
1261    ATRACE_CALL();
1262    int rc = 0;
1263
1264    // Sanity check stream_list
1265    if (streamList == NULL) {
1266        LOGE("NULL stream configuration");
1267        return BAD_VALUE;
1268    }
1269    if (streamList->streams == NULL) {
1270        LOGE("NULL stream list");
1271        return BAD_VALUE;
1272    }
1273
1274    if (streamList->num_streams < 1) {
1275        LOGE("Bad number of streams requested: %d",
1276                streamList->num_streams);
1277        return BAD_VALUE;
1278    }
1279
1280    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1281        LOGE("Maximum number of streams %d exceeded: %d",
1282                MAX_NUM_STREAMS, streamList->num_streams);
1283        return BAD_VALUE;
1284    }
1285
1286    mOpMode = streamList->operation_mode;
1287    LOGD("mOpMode: %d", mOpMode);
1288
1289    /* first invalidate all the steams in the mStreamList
1290     * if they appear again, they will be validated */
1291    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1292            it != mStreamInfo.end(); it++) {
1293        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1294        channel->stop();
1295        (*it)->status = INVALID;
1296    }
1297
1298    if (mRawDumpChannel) {
1299        mRawDumpChannel->stop();
1300        delete mRawDumpChannel;
1301        mRawDumpChannel = NULL;
1302    }
1303
1304    if (mSupportChannel)
1305        mSupportChannel->stop();
1306
1307    if (mAnalysisChannel) {
1308        mAnalysisChannel->stop();
1309    }
1310    if (mMetadataChannel) {
1311        /* If content of mStreamInfo is not 0, there is metadata stream */
1312        mMetadataChannel->stop();
1313    }
1314    if (mChannelHandle) {
1315        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1316                mChannelHandle);
1317        LOGD("stopping channel %d", mChannelHandle);
1318    }
1319
1320    pthread_mutex_lock(&mMutex);
1321
1322    // Check state
1323    switch (mState) {
1324        case INITIALIZED:
1325        case CONFIGURED:
1326        case STARTED:
1327            /* valid state */
1328            break;
1329
1330        case ERROR:
1331            pthread_mutex_unlock(&mMutex);
1332            handleCameraDeviceError();
1333            return -ENODEV;
1334
1335        default:
1336            LOGE("Invalid state %d", mState);
1337            pthread_mutex_unlock(&mMutex);
1338            return -ENODEV;
1339    }
1340
1341    /* Check whether we have video stream */
1342    m_bIs4KVideo = false;
1343    m_bIsVideo = false;
1344    m_bEisSupportedSize = false;
1345    m_bTnrEnabled = false;
1346    bool isZsl = false;
1347    uint32_t videoWidth = 0U;
1348    uint32_t videoHeight = 0U;
1349    size_t rawStreamCnt = 0;
1350    size_t stallStreamCnt = 0;
1351    size_t processedStreamCnt = 0;
1352    // Number of streams on ISP encoder path
1353    size_t numStreamsOnEncoder = 0;
1354    size_t numYuv888OnEncoder = 0;
1355    bool bYuv888OverrideJpeg = false;
1356    cam_dimension_t largeYuv888Size = {0, 0};
1357    cam_dimension_t maxViewfinderSize = {0, 0};
1358    bool bJpegExceeds4K = false;
1359    bool bUseCommonFeatureMask = false;
1360    uint32_t commonFeatureMask = 0;
1361    bool bSmallJpegSize = false;
1362    uint32_t width_ratio;
1363    uint32_t height_ratio;
1364    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1365    camera3_stream_t *inputStream = NULL;
1366    bool isJpeg = false;
1367    cam_dimension_t jpegSize = {0, 0};
1368
1369    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1370
1371    /*EIS configuration*/
1372    bool eisSupported = false;
1373    bool oisSupported = false;
1374    int32_t margin_index = -1;
1375    uint8_t eis_prop_set;
1376    uint32_t maxEisWidth = 0;
1377    uint32_t maxEisHeight = 0;
1378
1379    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1380
1381    size_t count = IS_TYPE_MAX;
1382    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1383    for (size_t i = 0; i < count; i++) {
1384        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1385            eisSupported = true;
1386            margin_index = (int32_t)i;
1387            break;
1388        }
1389    }
1390
1391    count = CAM_OPT_STAB_MAX;
1392    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1393    for (size_t i = 0; i < count; i++) {
1394        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1395            oisSupported = true;
1396            break;
1397        }
1398    }
1399
1400    if (eisSupported) {
1401        maxEisWidth = MAX_EIS_WIDTH;
1402        maxEisHeight = MAX_EIS_HEIGHT;
1403    }
1404
1405    /* EIS setprop control */
1406    char eis_prop[PROPERTY_VALUE_MAX];
1407    memset(eis_prop, 0, sizeof(eis_prop));
1408    property_get("persist.camera.eis.enable", eis_prop, "0");
1409    eis_prop_set = (uint8_t)atoi(eis_prop);
1410
1411    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1412            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1413
1414    /* stream configurations */
1415    for (size_t i = 0; i < streamList->num_streams; i++) {
1416        camera3_stream_t *newStream = streamList->streams[i];
1417        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1418                "height = %d, rotation = %d, usage = 0x%x",
1419                 i, newStream->stream_type, newStream->format,
1420                newStream->width, newStream->height, newStream->rotation,
1421                newStream->usage);
1422        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1423                newStream->stream_type == CAMERA3_STREAM_INPUT){
1424            isZsl = true;
1425        }
1426        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1427            inputStream = newStream;
1428        }
1429
1430        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1431            isJpeg = true;
1432            jpegSize.width = newStream->width;
1433            jpegSize.height = newStream->height;
1434            if (newStream->width > VIDEO_4K_WIDTH ||
1435                    newStream->height > VIDEO_4K_HEIGHT)
1436                bJpegExceeds4K = true;
1437        }
1438
1439        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1440                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1441            m_bIsVideo = true;
1442            videoWidth = newStream->width;
1443            videoHeight = newStream->height;
1444            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1445                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1446                m_bIs4KVideo = true;
1447            }
1448            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1449                                  (newStream->height <= maxEisHeight);
1450        }
1451        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1452                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1453            switch (newStream->format) {
1454            case HAL_PIXEL_FORMAT_BLOB:
1455                stallStreamCnt++;
1456                if (isOnEncoder(maxViewfinderSize, newStream->width,
1457                        newStream->height)) {
1458                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1459                    numStreamsOnEncoder++;
1460                }
1461                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1462                        newStream->width);
1463                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1464                        newStream->height);;
1465                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1466                        "FATAL: max_downscale_factor cannot be zero and so assert");
1467                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1468                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1469                    LOGH("Setting small jpeg size flag to true");
1470                    bSmallJpegSize = true;
1471                }
1472                break;
1473            case HAL_PIXEL_FORMAT_RAW10:
1474            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1475            case HAL_PIXEL_FORMAT_RAW16:
1476                rawStreamCnt++;
1477                break;
1478            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1479                processedStreamCnt++;
1480                if (isOnEncoder(maxViewfinderSize, newStream->width,
1481                        newStream->height)) {
1482                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1483                            IS_USAGE_ZSL(newStream->usage)) {
1484                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1485                    } else {
1486                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1487                    }
1488                    numStreamsOnEncoder++;
1489                }
1490                break;
1491            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1492                processedStreamCnt++;
1493                if (isOnEncoder(maxViewfinderSize, newStream->width,
1494                        newStream->height)) {
1495                    // If Yuv888 size is not greater than 4K, set feature mask
1496                    // to SUPERSET so that it support concurrent request on
1497                    // YUV and JPEG.
1498                    if (newStream->width <= VIDEO_4K_WIDTH &&
1499                            newStream->height <= VIDEO_4K_HEIGHT) {
1500                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1501                    } else {
1502                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1503                    }
1504                    numStreamsOnEncoder++;
1505                    numYuv888OnEncoder++;
1506                    largeYuv888Size.width = newStream->width;
1507                    largeYuv888Size.height = newStream->height;
1508                }
1509                break;
1510            default:
1511                processedStreamCnt++;
1512                if (isOnEncoder(maxViewfinderSize, newStream->width,
1513                        newStream->height)) {
1514                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1515                    numStreamsOnEncoder++;
1516                }
1517                break;
1518            }
1519
1520        }
1521    }
1522
1523    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1524        !m_bIsVideo) {
1525        m_bEisEnable = false;
1526    }
1527
1528    /* Logic to enable/disable TNR based on specific config size/etc.*/
1529    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1530            ((videoWidth == 1920 && videoHeight == 1080) ||
1531            (videoWidth == 1280 && videoHeight == 720)) &&
1532            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1533        m_bTnrEnabled = true;
1534
1535    /* Check if num_streams is sane */
1536    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1537            rawStreamCnt > MAX_RAW_STREAMS ||
1538            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1539        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1540                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1541        pthread_mutex_unlock(&mMutex);
1542        return -EINVAL;
1543    }
1544    /* Check whether we have zsl stream or 4k video case */
1545    if (isZsl && m_bIsVideo) {
1546        LOGE("Currently invalid configuration ZSL&Video!");
1547        pthread_mutex_unlock(&mMutex);
1548        return -EINVAL;
1549    }
1550    /* Check if stream sizes are sane */
1551    if (numStreamsOnEncoder > 2) {
1552        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1553        pthread_mutex_unlock(&mMutex);
1554        return -EINVAL;
1555    } else if (1 < numStreamsOnEncoder){
1556        bUseCommonFeatureMask = true;
1557        LOGH("Multiple streams above max viewfinder size, common mask needed");
1558    }
1559
1560    /* Check if BLOB size is greater than 4k in 4k recording case */
1561    if (m_bIs4KVideo && bJpegExceeds4K) {
1562        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1563        pthread_mutex_unlock(&mMutex);
1564        return -EINVAL;
1565    }
1566
1567    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1568    // the YUV stream's size is greater or equal to the JPEG size, set common
1569    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1570    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1571            jpegSize.width, jpegSize.height) &&
1572            largeYuv888Size.width > jpegSize.width &&
1573            largeYuv888Size.height > jpegSize.height) {
1574        bYuv888OverrideJpeg = true;
1575    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1576        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1577    }
1578
1579    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %x",
1580            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1581            commonFeatureMask);
1582    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1583            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1584
1585    rc = validateStreamDimensions(streamList);
1586    if (rc == NO_ERROR) {
1587        rc = validateStreamRotations(streamList);
1588    }
1589    if (rc != NO_ERROR) {
1590        LOGE("Invalid stream configuration requested!");
1591        pthread_mutex_unlock(&mMutex);
1592        return rc;
1593    }
1594
1595    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1596    camera3_stream_t *jpegStream = NULL;
1597    for (size_t i = 0; i < streamList->num_streams; i++) {
1598        camera3_stream_t *newStream = streamList->streams[i];
1599        LOGH("newStream type = %d, stream format = %d "
1600                "stream size : %d x %d, stream rotation = %d",
1601                 newStream->stream_type, newStream->format,
1602                newStream->width, newStream->height, newStream->rotation);
1603        //if the stream is in the mStreamList validate it
1604        bool stream_exists = false;
1605        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1606                it != mStreamInfo.end(); it++) {
1607            if ((*it)->stream == newStream) {
1608                QCamera3ProcessingChannel *channel =
1609                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1610                stream_exists = true;
1611                if (channel)
1612                    delete channel;
1613                (*it)->status = VALID;
1614                (*it)->stream->priv = NULL;
1615                (*it)->channel = NULL;
1616            }
1617        }
1618        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1619            //new stream
1620            stream_info_t* stream_info;
1621            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1622            if (!stream_info) {
1623               LOGE("Could not allocate stream info");
1624               rc = -ENOMEM;
1625               pthread_mutex_unlock(&mMutex);
1626               return rc;
1627            }
1628            stream_info->stream = newStream;
1629            stream_info->status = VALID;
1630            stream_info->channel = NULL;
1631            mStreamInfo.push_back(stream_info);
1632        }
1633        /* Covers Opaque ZSL and API1 F/W ZSL */
1634        if (IS_USAGE_ZSL(newStream->usage)
1635                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1636            if (zslStream != NULL) {
1637                LOGE("Multiple input/reprocess streams requested!");
1638                pthread_mutex_unlock(&mMutex);
1639                return BAD_VALUE;
1640            }
1641            zslStream = newStream;
1642        }
1643        /* Covers YUV reprocess */
1644        if (inputStream != NULL) {
1645            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1646                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1647                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1648                    && inputStream->width == newStream->width
1649                    && inputStream->height == newStream->height) {
1650                if (zslStream != NULL) {
1651                    /* This scenario indicates multiple YUV streams with same size
1652                     * as input stream have been requested, since zsl stream handle
1653                     * is solely use for the purpose of overriding the size of streams
1654                     * which share h/w streams we will just make a guess here as to
1655                     * which of the stream is a ZSL stream, this will be refactored
1656                     * once we make generic logic for streams sharing encoder output
1657                     */
1658                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1659                }
1660                zslStream = newStream;
1661            }
1662        }
1663        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1664            jpegStream = newStream;
1665        }
1666    }
1667
1668    /* If a zsl stream is set, we know that we have configured at least one input or
1669       bidirectional stream */
1670    if (NULL != zslStream) {
1671        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1672        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1673        mInputStreamInfo.format = zslStream->format;
1674        mInputStreamInfo.usage = zslStream->usage;
1675        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1676                 mInputStreamInfo.dim.width,
1677                mInputStreamInfo.dim.height,
1678                mInputStreamInfo.format, mInputStreamInfo.usage);
1679    }
1680
1681    cleanAndSortStreamInfo();
1682    if (mMetadataChannel) {
1683        delete mMetadataChannel;
1684        mMetadataChannel = NULL;
1685    }
1686    if (mSupportChannel) {
1687        delete mSupportChannel;
1688        mSupportChannel = NULL;
1689    }
1690
1691    if (mAnalysisChannel) {
1692        delete mAnalysisChannel;
1693        mAnalysisChannel = NULL;
1694    }
1695
1696    if (mDummyBatchChannel) {
1697        delete mDummyBatchChannel;
1698        mDummyBatchChannel = NULL;
1699    }
1700
1701    //Create metadata channel and initialize it
1702    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1703                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1704                    &padding_info, CAM_QCOM_FEATURE_NONE, this);
1705    if (mMetadataChannel == NULL) {
1706        LOGE("failed to allocate metadata channel");
1707        rc = -ENOMEM;
1708        pthread_mutex_unlock(&mMutex);
1709        return rc;
1710    }
1711    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1712    if (rc < 0) {
1713        LOGE("metadata channel initialization failed");
1714        delete mMetadataChannel;
1715        mMetadataChannel = NULL;
1716        pthread_mutex_unlock(&mMutex);
1717        return rc;
1718    }
1719
1720    // Create analysis stream all the time, even when h/w support is not available
1721    {
1722        mAnalysisChannel = new QCamera3SupportChannel(
1723                mCameraHandle->camera_handle,
1724                mChannelHandle,
1725                mCameraHandle->ops,
1726                &gCamCapability[mCameraId]->analysis_padding_info,
1727                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1728                CAM_STREAM_TYPE_ANALYSIS,
1729                &gCamCapability[mCameraId]->analysis_recommended_res,
1730                (gCamCapability[mCameraId]->analysis_recommended_format
1731                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1732                : CAM_FORMAT_YUV_420_NV21),
1733                gCamCapability[mCameraId]->hw_analysis_supported,
1734                this,
1735                0); // force buffer count to 0
1736        if (!mAnalysisChannel) {
1737            LOGE("H/W Analysis channel cannot be created");
1738            pthread_mutex_unlock(&mMutex);
1739            return -ENOMEM;
1740        }
1741    }
1742
1743    bool isRawStreamRequested = false;
1744    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1745    /* Allocate channel objects for the requested streams */
1746    for (size_t i = 0; i < streamList->num_streams; i++) {
1747        camera3_stream_t *newStream = streamList->streams[i];
1748        uint32_t stream_usage = newStream->usage;
1749        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1750        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1751        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1752                || IS_USAGE_ZSL(newStream->usage)) &&
1753            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1754            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1755            if (bUseCommonFeatureMask) {
1756                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1757                        commonFeatureMask;
1758            } else {
1759                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1760                        CAM_QCOM_FEATURE_NONE;
1761            }
1762
1763        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1764                LOGH("Input stream configured, reprocess config");
1765        } else {
1766            //for non zsl streams find out the format
1767            switch (newStream->format) {
1768            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1769            {
1770                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1771                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1772                /* add additional features to pp feature mask */
1773                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1774                        mStreamConfigInfo.num_streams);
1775
1776                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1777                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1778                                CAM_STREAM_TYPE_VIDEO;
1779                    if (m_bTnrEnabled && m_bTnrVideo) {
1780                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1781                            CAM_QCOM_FEATURE_CPP_TNR;
1782                    }
1783                } else {
1784                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1785                            CAM_STREAM_TYPE_PREVIEW;
1786                    if (m_bTnrEnabled && m_bTnrPreview) {
1787                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1788                                CAM_QCOM_FEATURE_CPP_TNR;
1789                    }
1790                    padding_info.width_padding = mSurfaceStridePadding;
1791                    padding_info.height_padding = CAM_PAD_TO_2;
1792                }
1793                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1794                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1795                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1796                            newStream->height;
1797                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1798                            newStream->width;
1799                }
1800            }
1801            break;
1802            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1803                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1804                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1805                    if (bUseCommonFeatureMask)
1806                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1807                                commonFeatureMask;
1808                    else
1809                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1810                                CAM_QCOM_FEATURE_NONE;
1811                } else {
1812                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1813                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1814                }
1815            break;
1816            case HAL_PIXEL_FORMAT_BLOB:
1817                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1818                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1819                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1820                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1821                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1822                } else {
1823                    if (bUseCommonFeatureMask &&
1824                            isOnEncoder(maxViewfinderSize, newStream->width,
1825                            newStream->height)) {
1826                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1827                    } else {
1828                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1829                    }
1830                }
1831                if (isZsl) {
1832                    if (zslStream) {
1833                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1834                                (int32_t)zslStream->width;
1835                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1836                                (int32_t)zslStream->height;
1837                    } else {
1838                        LOGE("Error, No ZSL stream identified");
1839                        pthread_mutex_unlock(&mMutex);
1840                        return -EINVAL;
1841                    }
1842                } else if (m_bIs4KVideo) {
1843                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1844                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1845                } else if (bYuv888OverrideJpeg) {
1846                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1847                            (int32_t)largeYuv888Size.width;
1848                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1849                            (int32_t)largeYuv888Size.height;
1850                }
1851                break;
1852            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1853            case HAL_PIXEL_FORMAT_RAW16:
1854            case HAL_PIXEL_FORMAT_RAW10:
1855                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1856                isRawStreamRequested = true;
1857                break;
1858            default:
1859                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1860                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1861                break;
1862            }
1863        }
1864
1865        if (newStream->priv == NULL) {
1866            //New stream, construct channel
1867            switch (newStream->stream_type) {
1868            case CAMERA3_STREAM_INPUT:
1869                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1870                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1871                break;
1872            case CAMERA3_STREAM_BIDIRECTIONAL:
1873                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1874                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1875                break;
1876            case CAMERA3_STREAM_OUTPUT:
1877                /* For video encoding stream, set read/write rarely
1878                 * flag so that they may be set to un-cached */
1879                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1880                    newStream->usage |=
1881                         (GRALLOC_USAGE_SW_READ_RARELY |
1882                         GRALLOC_USAGE_SW_WRITE_RARELY |
1883                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1884                else if (IS_USAGE_ZSL(newStream->usage))
1885                {
1886                    LOGD("ZSL usage flag skipping");
1887                }
1888                else if (newStream == zslStream
1889                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1890                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1891                } else
1892                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1893                break;
1894            default:
1895                LOGE("Invalid stream_type %d", newStream->stream_type);
1896                break;
1897            }
1898
1899            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1900                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1901                QCamera3ProcessingChannel *channel = NULL;
1902                switch (newStream->format) {
1903                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1904                    if ((newStream->usage &
1905                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1906                            (streamList->operation_mode ==
1907                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1908                    ) {
1909                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1910                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1911                                &gCamCapability[mCameraId]->padding_info,
1912                                this,
1913                                newStream,
1914                                (cam_stream_type_t)
1915                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1916                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1917                                mMetadataChannel,
1918                                0); //heap buffers are not required for HFR video channel
1919                        if (channel == NULL) {
1920                            LOGE("allocation of channel failed");
1921                            pthread_mutex_unlock(&mMutex);
1922                            return -ENOMEM;
1923                        }
1924                        //channel->getNumBuffers() will return 0 here so use
1925                        //MAX_INFLIGH_HFR_REQUESTS
1926                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1927                        newStream->priv = channel;
1928                        LOGI("num video buffers in HFR mode: %d",
1929                                 MAX_INFLIGHT_HFR_REQUESTS);
1930                    } else {
1931                        /* Copy stream contents in HFR preview only case to create
1932                         * dummy batch channel so that sensor streaming is in
1933                         * HFR mode */
1934                        if (!m_bIsVideo && (streamList->operation_mode ==
1935                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1936                            mDummyBatchStream = *newStream;
1937                        }
1938                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1939                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1940                                &gCamCapability[mCameraId]->padding_info,
1941                                this,
1942                                newStream,
1943                                (cam_stream_type_t)
1944                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1945                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1946                                mMetadataChannel,
1947                                MAX_INFLIGHT_REQUESTS);
1948                        if (channel == NULL) {
1949                            LOGE("allocation of channel failed");
1950                            pthread_mutex_unlock(&mMutex);
1951                            return -ENOMEM;
1952                        }
1953                        newStream->max_buffers = channel->getNumBuffers();
1954                        newStream->priv = channel;
1955                    }
1956                    break;
1957                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1958                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1959                            mChannelHandle,
1960                            mCameraHandle->ops, captureResultCb,
1961                            &padding_info,
1962                            this,
1963                            newStream,
1964                            (cam_stream_type_t)
1965                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1966                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1967                            mMetadataChannel);
1968                    if (channel == NULL) {
1969                        LOGE("allocation of YUV channel failed");
1970                        pthread_mutex_unlock(&mMutex);
1971                        return -ENOMEM;
1972                    }
1973                    newStream->max_buffers = channel->getNumBuffers();
1974                    newStream->priv = channel;
1975                    break;
1976                }
1977                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1978                case HAL_PIXEL_FORMAT_RAW16:
1979                case HAL_PIXEL_FORMAT_RAW10:
1980                    mRawChannel = new QCamera3RawChannel(
1981                            mCameraHandle->camera_handle, mChannelHandle,
1982                            mCameraHandle->ops, captureResultCb,
1983                            &padding_info,
1984                            this, newStream, CAM_QCOM_FEATURE_NONE,
1985                            mMetadataChannel,
1986                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1987                    if (mRawChannel == NULL) {
1988                        LOGE("allocation of raw channel failed");
1989                        pthread_mutex_unlock(&mMutex);
1990                        return -ENOMEM;
1991                    }
1992                    newStream->max_buffers = mRawChannel->getNumBuffers();
1993                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1994                    break;
1995                case HAL_PIXEL_FORMAT_BLOB:
1996                    // Max live snapshot inflight buffer is 1. This is to mitigate
1997                    // frame drop issues for video snapshot. The more buffers being
1998                    // allocated, the more frame drops there are.
1999                    mPictureChannel = new QCamera3PicChannel(
2000                            mCameraHandle->camera_handle, mChannelHandle,
2001                            mCameraHandle->ops, captureResultCb,
2002                            &padding_info, this, newStream,
2003                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2004                            m_bIs4KVideo, isZsl, mMetadataChannel,
2005                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2006                    if (mPictureChannel == NULL) {
2007                        LOGE("allocation of channel failed");
2008                        pthread_mutex_unlock(&mMutex);
2009                        return -ENOMEM;
2010                    }
2011                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2012                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2013                    mPictureChannel->overrideYuvSize(
2014                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2015                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2016                    break;
2017
2018                default:
2019                    LOGE("not a supported format 0x%x", newStream->format);
2020                    break;
2021                }
2022            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2023                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2024            } else {
2025                LOGE("Error, Unknown stream type");
2026                pthread_mutex_unlock(&mMutex);
2027                return -EINVAL;
2028            }
2029
2030            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2031            if (channel != NULL && channel->isUBWCEnabled()) {
2032                cam_format_t fmt = channel->getStreamDefaultFormat(
2033                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2034                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2035                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2036                }
2037            }
2038
2039            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2040                    it != mStreamInfo.end(); it++) {
2041                if ((*it)->stream == newStream) {
2042                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2043                    break;
2044                }
2045            }
2046        } else {
2047            // Channel already exists for this stream
2048            // Do nothing for now
2049        }
2050        padding_info = gCamCapability[mCameraId]->padding_info;
2051
2052        /* Do not add entries for input stream in metastream info
2053         * since there is no real stream associated with it
2054         */
2055        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2056            mStreamConfigInfo.num_streams++;
2057    }
2058
2059    //RAW DUMP channel
2060    if (mEnableRawDump && isRawStreamRequested == false){
2061        cam_dimension_t rawDumpSize;
2062        rawDumpSize = getMaxRawSize(mCameraId);
2063        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2064                                  mChannelHandle,
2065                                  mCameraHandle->ops,
2066                                  rawDumpSize,
2067                                  &padding_info,
2068                                  this, CAM_QCOM_FEATURE_NONE);
2069        if (!mRawDumpChannel) {
2070            LOGE("Raw Dump channel cannot be created");
2071            pthread_mutex_unlock(&mMutex);
2072            return -ENOMEM;
2073        }
2074    }
2075
2076
2077    if (mAnalysisChannel) {
2078        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2079                gCamCapability[mCameraId]->analysis_recommended_res;
2080        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2081                CAM_STREAM_TYPE_ANALYSIS;
2082        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2083                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2084        mStreamConfigInfo.num_streams++;
2085    }
2086
2087    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2088        mSupportChannel = new QCamera3SupportChannel(
2089                mCameraHandle->camera_handle,
2090                mChannelHandle,
2091                mCameraHandle->ops,
2092                &gCamCapability[mCameraId]->padding_info,
2093                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2094                CAM_STREAM_TYPE_CALLBACK,
2095                &QCamera3SupportChannel::kDim,
2096                CAM_FORMAT_YUV_420_NV21,
2097                gCamCapability[mCameraId]->hw_analysis_supported,
2098                this);
2099        if (!mSupportChannel) {
2100            LOGE("dummy channel cannot be created");
2101            pthread_mutex_unlock(&mMutex);
2102            return -ENOMEM;
2103        }
2104    }
2105
2106    if (mSupportChannel) {
2107        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2108                QCamera3SupportChannel::kDim;
2109        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2110                CAM_STREAM_TYPE_CALLBACK;
2111        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2112                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2113        mStreamConfigInfo.num_streams++;
2114    }
2115
2116    if (mRawDumpChannel) {
2117        cam_dimension_t rawSize;
2118        rawSize = getMaxRawSize(mCameraId);
2119        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2120                rawSize;
2121        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2122                CAM_STREAM_TYPE_RAW;
2123        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2124                CAM_QCOM_FEATURE_NONE;
2125        mStreamConfigInfo.num_streams++;
2126    }
2127    /* In HFR mode, if video stream is not added, create a dummy channel so that
2128     * ISP can create a batch mode even for preview only case. This channel is
2129     * never 'start'ed (no stream-on), it is only 'initialized'  */
2130    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2131            !m_bIsVideo) {
2132        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2133                mChannelHandle,
2134                mCameraHandle->ops, captureResultCb,
2135                &gCamCapability[mCameraId]->padding_info,
2136                this,
2137                &mDummyBatchStream,
2138                CAM_STREAM_TYPE_VIDEO,
2139                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
2140                mMetadataChannel);
2141        if (NULL == mDummyBatchChannel) {
2142            LOGE("creation of mDummyBatchChannel failed."
2143                    "Preview will use non-hfr sensor mode ");
2144        }
2145    }
2146    if (mDummyBatchChannel) {
2147        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2148                mDummyBatchStream.width;
2149        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2150                mDummyBatchStream.height;
2151        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2152                CAM_STREAM_TYPE_VIDEO;
2153        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2154                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2155        mStreamConfigInfo.num_streams++;
2156    }
2157
2158    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2159    mStreamConfigInfo.buffer_info.max_buffers =
2160            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2161
2162    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2163    for (pendingRequestIterator i = mPendingRequestsList.begin();
2164            i != mPendingRequestsList.end();) {
2165        i = erasePendingRequest(i);
2166    }
2167    mPendingFrameDropList.clear();
2168    // Initialize/Reset the pending buffers list
2169    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2170        req.mPendingBufferList.clear();
2171    }
2172    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2173
2174    mPendingReprocessResultList.clear();
2175
2176    mCurJpegMeta.clear();
2177    //Get min frame duration for this streams configuration
2178    deriveMinFrameDuration();
2179
2180    // Update state
2181    mState = CONFIGURED;
2182
2183    pthread_mutex_unlock(&mMutex);
2184
2185    return rc;
2186}
2187
2188/*===========================================================================
2189 * FUNCTION   : validateCaptureRequest
2190 *
2191 * DESCRIPTION: validate a capture request from camera service
2192 *
2193 * PARAMETERS :
2194 *   @request : request from framework to process
2195 *
2196 * RETURN     :
2197 *
2198 *==========================================================================*/
2199int QCamera3HardwareInterface::validateCaptureRequest(
2200                    camera3_capture_request_t *request)
2201{
2202    ssize_t idx = 0;
2203    const camera3_stream_buffer_t *b;
2204    CameraMetadata meta;
2205
2206    /* Sanity check the request */
2207    if (request == NULL) {
2208        LOGE("NULL capture request");
2209        return BAD_VALUE;
2210    }
2211
2212    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2213        /*settings cannot be null for the first request*/
2214        return BAD_VALUE;
2215    }
2216
2217    uint32_t frameNumber = request->frame_number;
2218    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2219        LOGE("Request %d: No output buffers provided!",
2220                __FUNCTION__, frameNumber);
2221        return BAD_VALUE;
2222    }
2223    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2224        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2225                 request->num_output_buffers, MAX_NUM_STREAMS);
2226        return BAD_VALUE;
2227    }
2228    if (request->input_buffer != NULL) {
2229        b = request->input_buffer;
2230        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2231            LOGE("Request %d: Buffer %ld: Status not OK!",
2232                     frameNumber, (long)idx);
2233            return BAD_VALUE;
2234        }
2235        if (b->release_fence != -1) {
2236            LOGE("Request %d: Buffer %ld: Has a release fence!",
2237                     frameNumber, (long)idx);
2238            return BAD_VALUE;
2239        }
2240        if (b->buffer == NULL) {
2241            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2242                     frameNumber, (long)idx);
2243            return BAD_VALUE;
2244        }
2245    }
2246
2247    // Validate all buffers
2248    b = request->output_buffers;
2249    do {
2250        QCamera3ProcessingChannel *channel =
2251                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2252        if (channel == NULL) {
2253            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2254                     frameNumber, (long)idx);
2255            return BAD_VALUE;
2256        }
2257        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2258            LOGE("Request %d: Buffer %ld: Status not OK!",
2259                     frameNumber, (long)idx);
2260            return BAD_VALUE;
2261        }
2262        if (b->release_fence != -1) {
2263            LOGE("Request %d: Buffer %ld: Has a release fence!",
2264                     frameNumber, (long)idx);
2265            return BAD_VALUE;
2266        }
2267        if (b->buffer == NULL) {
2268            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2269                     frameNumber, (long)idx);
2270            return BAD_VALUE;
2271        }
2272        if (*(b->buffer) == NULL) {
2273            LOGE("Request %d: Buffer %ld: NULL private handle!",
2274                     frameNumber, (long)idx);
2275            return BAD_VALUE;
2276        }
2277        idx++;
2278        b = request->output_buffers + idx;
2279    } while (idx < (ssize_t)request->num_output_buffers);
2280
2281    return NO_ERROR;
2282}
2283
2284/*===========================================================================
2285 * FUNCTION   : deriveMinFrameDuration
2286 *
2287 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2288 *              on currently configured streams.
2289 *
2290 * PARAMETERS : NONE
2291 *
2292 * RETURN     : NONE
2293 *
2294 *==========================================================================*/
2295void QCamera3HardwareInterface::deriveMinFrameDuration()
2296{
2297    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2298
2299    maxJpegDim = 0;
2300    maxProcessedDim = 0;
2301    maxRawDim = 0;
2302
2303    // Figure out maximum jpeg, processed, and raw dimensions
2304    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2305        it != mStreamInfo.end(); it++) {
2306
2307        // Input stream doesn't have valid stream_type
2308        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2309            continue;
2310
2311        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2312        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2313            if (dimension > maxJpegDim)
2314                maxJpegDim = dimension;
2315        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2316                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2317                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2318            if (dimension > maxRawDim)
2319                maxRawDim = dimension;
2320        } else {
2321            if (dimension > maxProcessedDim)
2322                maxProcessedDim = dimension;
2323        }
2324    }
2325
2326    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2327            MAX_SIZES_CNT);
2328
2329    //Assume all jpeg dimensions are in processed dimensions.
2330    if (maxJpegDim > maxProcessedDim)
2331        maxProcessedDim = maxJpegDim;
2332    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2333    if (maxProcessedDim > maxRawDim) {
2334        maxRawDim = INT32_MAX;
2335
2336        for (size_t i = 0; i < count; i++) {
2337            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2338                    gCamCapability[mCameraId]->raw_dim[i].height;
2339            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2340                maxRawDim = dimension;
2341        }
2342    }
2343
2344    //Find minimum durations for processed, jpeg, and raw
2345    for (size_t i = 0; i < count; i++) {
2346        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2347                gCamCapability[mCameraId]->raw_dim[i].height) {
2348            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2349            break;
2350        }
2351    }
2352    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2353    for (size_t i = 0; i < count; i++) {
2354        if (maxProcessedDim ==
2355                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2356                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2357            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2358            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2359            break;
2360        }
2361    }
2362}
2363
2364/*===========================================================================
2365 * FUNCTION   : getMinFrameDuration
2366 *
2367 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2368 *              and current request configuration.
2369 *
2370 * PARAMETERS : @request: requset sent by the frameworks
2371 *
2372 * RETURN     : min farme duration for a particular request
2373 *
2374 *==========================================================================*/
2375int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2376{
2377    bool hasJpegStream = false;
2378    bool hasRawStream = false;
2379    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2380        const camera3_stream_t *stream = request->output_buffers[i].stream;
2381        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2382            hasJpegStream = true;
2383        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2384                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2385                stream->format == HAL_PIXEL_FORMAT_RAW16)
2386            hasRawStream = true;
2387    }
2388
2389    if (!hasJpegStream)
2390        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2391    else
2392        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2393}
2394
2395/*===========================================================================
2396 * FUNCTION   : handleBuffersDuringFlushLock
2397 *
2398 * DESCRIPTION: Account for buffers returned from back-end during flush
2399 *              This function is executed while mMutex is held by the caller.
2400 *
2401 * PARAMETERS :
2402 *   @buffer: image buffer for the callback
2403 *
2404 * RETURN     :
2405 *==========================================================================*/
2406void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2407{
2408    bool buffer_found = false;
2409    for (List<PendingBuffersInRequest>::iterator req =
2410            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2411            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2412        for (List<PendingBufferInfo>::iterator i =
2413                req->mPendingBufferList.begin();
2414                i != req->mPendingBufferList.end(); i++) {
2415            if (i->buffer == buffer->buffer) {
2416                mPendingBuffersMap.numPendingBufsAtFlush--;
2417                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2418                    buffer->buffer, req->frame_number,
2419                    mPendingBuffersMap.numPendingBufsAtFlush);
2420                buffer_found = true;
2421                break;
2422            }
2423        }
2424        if (buffer_found) {
2425            break;
2426        }
2427    }
2428    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2429        //signal the flush()
2430        LOGD("All buffers returned to HAL. Continue flush");
2431        pthread_cond_signal(&mBuffersCond);
2432    }
2433}
2434
2435
2436/*===========================================================================
2437 * FUNCTION   : handlePendingReprocResults
2438 *
2439 * DESCRIPTION: check and notify on any pending reprocess results
2440 *
2441 * PARAMETERS :
2442 *   @frame_number   : Pending request frame number
2443 *
2444 * RETURN     : int32_t type of status
2445 *              NO_ERROR  -- success
2446 *              none-zero failure code
2447 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result for this frame number; at most
    // one entry is handled per call (both loops break after a match).
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the shutter/notify message that was held back.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its settings and input
            // buffer can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    // NOTE: result.output_buffers points into the list entry;
                    // the entry must stay alive until after the callback below.
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // The request is fully answered; drop it from the pending
                    // request list (k is invalid after this).
                    erasePendingRequest(k);
                    break;
                }
            }
            // Safe to erase only now: the callback above read j->buffer.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2486
2487/*===========================================================================
2488 * FUNCTION   : handleBatchMetadata
2489 *
2490 * DESCRIPTION: Handles metadata buffer callback in batch mode
2491 *
2492 * PARAMETERS : @metadata_buf: metadata buffer
2493 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2494 *                 the meta buf in this method
2495 *
2496 * RETURN     :
2497 *
2498 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metdata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the batch-level bookkeeping fields out of the metadata buffer.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // If any field is missing, still run the per-frame loop once below so
    // handleMetadataWithLock can do pipeline-depth accounting.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // mPendingBatchMap maps last->first frame number of each batch, so
        // the diff below is the number of frames in this batch.
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch entry is only removed on the (non-urgent) frame-number
        // path; the urgent path above must leave it in place.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Emit one interpolated result per frame in the larger of the two
        // ranges; batches beyond MAX_HFR_BATCH_SIZE are unexpected.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Patch the shared metadata buffer in place with the
                    // interpolated urgent frame number for this iteration.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: step back from the last frame's timestamp
                //by (loopCount - 1) HFR frame intervals, then advance i
                //intervals for the current frame.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // The buffer is released once below (after the loop), so each
        // per-frame call must not buf-done/free it.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2647
2648/*===========================================================================
2649 * FUNCTION   : handleMetadataWithLock
2650 *
2651 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2652 *
2653 * PARAMETERS : @metadata_buf: metadata buffer
2654 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2655 *                 the meta buf in this method
2656 *
2657 * RETURN     :
2658 *
2659 *==========================================================================*/
2660void QCamera3HardwareInterface::handleMetadataWithLock(
2661    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2662{
2663    ATRACE_CALL();
2664    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2665        //during flush do not send metadata from this thread
2666        LOGD("not sending metadata during flush or when mState is error");
2667        if (free_and_bufdone_meta_buf) {
2668            mMetadataChannel->bufDone(metadata_buf);
2669            free(metadata_buf);
2670        }
2671        return;
2672    }
2673
2674    //not in flush
2675    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2676    int32_t frame_number_valid, urgent_frame_number_valid;
2677    uint32_t frame_number, urgent_frame_number;
2678    int64_t capture_time;
2679    nsecs_t currentSysTime;
2680
2681    int32_t *p_frame_number_valid =
2682            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2683    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2684    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2685    int32_t *p_urgent_frame_number_valid =
2686            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2687    uint32_t *p_urgent_frame_number =
2688            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2689    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2690            metadata) {
2691        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2692                 *p_frame_number_valid, *p_frame_number);
2693    }
2694
2695    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2696            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2697        LOGE("Invalid metadata");
2698        if (free_and_bufdone_meta_buf) {
2699            mMetadataChannel->bufDone(metadata_buf);
2700            free(metadata_buf);
2701        }
2702        goto done_metadata;
2703    }
2704    frame_number_valid =        *p_frame_number_valid;
2705    frame_number =              *p_frame_number;
2706    capture_time =              *p_capture_time;
2707    urgent_frame_number_valid = *p_urgent_frame_number_valid;
2708    urgent_frame_number =       *p_urgent_frame_number;
2709    currentSysTime =            systemTime(CLOCK_MONOTONIC);
2710
2711    // Detect if buffers from any requests are overdue
2712    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2713        if ( (currentSysTime - req.timestamp) >
2714            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2715            for (auto &missed : req.mPendingBufferList) {
2716                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
2717                    "stream type = %d, stream format = %d",
2718                    frame_number, req.frame_number, missed.buffer,
2719                    missed.stream->stream_type, missed.stream->format);
2720            }
2721        }
2722    }
2723    //Partial result on process_capture_result for timestamp
2724    if (urgent_frame_number_valid) {
2725        LOGD("valid urgent frame_number = %u, capture_time = %lld",
2726           urgent_frame_number, capture_time);
2727
2728        //Recieved an urgent Frame Number, handle it
2729        //using partial results
2730        for (pendingRequestIterator i =
2731                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2732            LOGD("Iterator Frame = %d urgent frame = %d",
2733                 i->frame_number, urgent_frame_number);
2734
2735            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2736                (i->partial_result_cnt == 0)) {
2737                LOGE("Error: HAL missed urgent metadata for frame number %d",
2738                         i->frame_number);
2739            }
2740
2741            if (i->frame_number == urgent_frame_number &&
2742                     i->bUrgentReceived == 0) {
2743
2744                camera3_capture_result_t result;
2745                memset(&result, 0, sizeof(camera3_capture_result_t));
2746
2747                i->partial_result_cnt++;
2748                i->bUrgentReceived = 1;
2749                // Extract 3A metadata
2750                result.result =
2751                    translateCbUrgentMetadataToResultMetadata(metadata);
2752                // Populate metadata result
2753                result.frame_number = urgent_frame_number;
2754                result.num_output_buffers = 0;
2755                result.output_buffers = NULL;
2756                result.partial_result = i->partial_result_cnt;
2757
2758                mCallbackOps->process_capture_result(mCallbackOps, &result);
2759                LOGD("urgent frame_number = %u, capture_time = %lld",
2760                      result.frame_number, capture_time);
2761                free_camera_metadata((camera_metadata_t *)result.result);
2762                break;
2763            }
2764        }
2765    }
2766
2767    if (!frame_number_valid) {
2768        LOGD("Not a valid normal frame number, used as SOF only");
2769        if (free_and_bufdone_meta_buf) {
2770            mMetadataChannel->bufDone(metadata_buf);
2771            free(metadata_buf);
2772        }
2773        goto done_metadata;
2774    }
2775    LOGH("valid frame_number = %u, capture_time = %lld",
2776            frame_number, capture_time);
2777
2778    for (pendingRequestIterator i = mPendingRequestsList.begin();
2779            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2780        // Flush out all entries with less or equal frame numbers.
2781
2782        camera3_capture_result_t result;
2783        memset(&result, 0, sizeof(camera3_capture_result_t));
2784
2785        LOGD("frame_number in the list is %u", i->frame_number);
2786        i->partial_result_cnt++;
2787        result.partial_result = i->partial_result_cnt;
2788
2789        // Check whether any stream buffer corresponding to this is dropped or not
2790        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2791        // The API does not expect a blob buffer to be dropped
2792        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2793            /* Clear notify_msg structure */
2794            camera3_notify_msg_t notify_msg;
2795            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2796            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2797                    j != i->buffers.end(); j++) {
2798                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2799                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2800                for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2801                    if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2802                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2803                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2804                                __func__, i->frame_number, streamID, j->stream->format);
2805                        notify_msg.type = CAMERA3_MSG_ERROR;
2806                        notify_msg.message.error.frame_number = i->frame_number;
2807                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2808                        notify_msg.message.error.error_stream = j->stream;
2809                        mCallbackOps->notify(mCallbackOps, &notify_msg);
2810                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2811                                __func__, i->frame_number, streamID, j->stream->format);
2812                        PendingFrameDropInfo PendingFrameDrop;
2813                        PendingFrameDrop.frame_number=i->frame_number;
2814                        PendingFrameDrop.stream_ID = streamID;
2815                        // Add the Frame drop info to mPendingFrameDropList
2816                        mPendingFrameDropList.push_back(PendingFrameDrop);
2817                   }
2818               }
2819            }
2820        }
2821
2822        // Send empty metadata with already filled buffers for dropped metadata
2823        // and send valid metadata with already filled buffers for current metadata
2824        /* we could hit this case when we either
2825         * 1. have a pending reprocess request or
2826         * 2. miss a metadata buffer callback */
2827        if (i->frame_number < frame_number) {
2828            if (i->input_buffer) {
2829                /* this will be handled in handleInputBufferWithLock */
2830                i++;
2831                continue;
2832            } else {
2833                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
2834                if (free_and_bufdone_meta_buf) {
2835                    mMetadataChannel->bufDone(metadata_buf);
2836                    free(metadata_buf);
2837                }
2838                mState = ERROR;
2839                goto done_metadata;
2840            }
2841        } else {
2842            mPendingLiveRequest--;
2843            /* Clear notify_msg structure */
2844            camera3_notify_msg_t notify_msg;
2845            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2846
2847            // Send shutter notify to frameworks
2848            notify_msg.type = CAMERA3_MSG_SHUTTER;
2849            notify_msg.message.shutter.frame_number = i->frame_number;
2850            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2851            mCallbackOps->notify(mCallbackOps, &notify_msg);
2852
2853            i->timestamp = capture_time;
2854
2855            // Find channel requiring metadata, meaning internal offline postprocess
2856            // is needed.
2857            //TODO: for now, we don't support two streams requiring metadata at the same time.
2858            // (because we are not making copies, and metadata buffer is not reference counted.
2859            bool internalPproc = false;
2860            for (pendingBufferIterator iter = i->buffers.begin();
2861                    iter != i->buffers.end(); iter++) {
2862                if (iter->need_metadata) {
2863                    internalPproc = true;
2864                    QCamera3ProcessingChannel *channel =
2865                            (QCamera3ProcessingChannel *)iter->stream->priv;
2866                    channel->queueReprocMetadata(metadata_buf);
2867                    break;
2868                }
2869            }
2870
2871            result.result = translateFromHalMetadata(metadata,
2872                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2873                    i->capture_intent, internalPproc, i->fwkCacMode);
2874
2875            saveExifParams(metadata);
2876
2877            if (i->blob_request) {
2878                {
2879                    //Dump tuning metadata if enabled and available
2880                    char prop[PROPERTY_VALUE_MAX];
2881                    memset(prop, 0, sizeof(prop));
2882                    property_get("persist.camera.dumpmetadata", prop, "0");
2883                    int32_t enabled = atoi(prop);
2884                    if (enabled && metadata->is_tuning_params_valid) {
2885                        dumpMetadataToFile(metadata->tuning_params,
2886                               mMetaFrameCount,
2887                               enabled,
2888                               "Snapshot",
2889                               frame_number);
2890                    }
2891                }
2892            }
2893
2894            if (!internalPproc) {
2895                LOGD("couldn't find need_metadata for this metadata");
2896                // Return metadata buffer
2897                if (free_and_bufdone_meta_buf) {
2898                    mMetadataChannel->bufDone(metadata_buf);
2899                    free(metadata_buf);
2900                }
2901            }
2902        }
2903        if (!result.result) {
2904            LOGE("metadata is NULL");
2905        }
2906        result.frame_number = i->frame_number;
2907        result.input_buffer = i->input_buffer;
2908        result.num_output_buffers = 0;
2909        result.output_buffers = NULL;
2910        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2911                    j != i->buffers.end(); j++) {
2912            if (j->buffer) {
2913                result.num_output_buffers++;
2914            }
2915        }
2916
2917        updateFpsInPreviewBuffer(metadata, i->frame_number);
2918
2919        if (result.num_output_buffers > 0) {
2920            camera3_stream_buffer_t *result_buffers =
2921                new camera3_stream_buffer_t[result.num_output_buffers];
2922            if (result_buffers != NULL) {
2923                size_t result_buffers_idx = 0;
2924                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2925                        j != i->buffers.end(); j++) {
2926                    if (j->buffer) {
2927                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2928                                m != mPendingFrameDropList.end(); m++) {
2929                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2930                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2931                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2932                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2933                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
2934                                        frame_number, streamID);
2935                                m = mPendingFrameDropList.erase(m);
2936                                break;
2937                            }
2938                        }
2939                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
2940                        result_buffers[result_buffers_idx++] = *(j->buffer);
2941                        free(j->buffer);
2942                        j->buffer = NULL;
2943                    }
2944                }
2945                result.output_buffers = result_buffers;
2946                mCallbackOps->process_capture_result(mCallbackOps, &result);
2947                LOGD("meta frame_number = %u, capture_time = %lld",
2948                        result.frame_number, i->timestamp);
2949                free_camera_metadata((camera_metadata_t *)result.result);
2950                delete[] result_buffers;
2951            }else {
2952                LOGE("Fatal error: out of memory");
2953            }
2954        } else {
2955            mCallbackOps->process_capture_result(mCallbackOps, &result);
2956            LOGD("meta frame_number = %u, capture_time = %lld",
2957                    result.frame_number, i->timestamp);
2958            free_camera_metadata((camera_metadata_t *)result.result);
2959        }
2960
2961        i = erasePendingRequest(i);
2962
2963        if (!mPendingReprocessResultList.empty()) {
2964            handlePendingReprocResults(frame_number + 1);
2965        }
2966    }
2967
2968done_metadata:
2969    for (pendingRequestIterator i = mPendingRequestsList.begin();
2970            i != mPendingRequestsList.end() ;i++) {
2971        i->pipeline_depth++;
2972    }
2973    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
2974    unblockRequestIfNecessary();
2975}
2976
2977/*===========================================================================
2978 * FUNCTION   : hdrPlusPerfLock
2979 *
2980 * DESCRIPTION: perf lock for HDR+ using custom intent
2981 *
2982 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2983 *
2984 * RETURN     : None
2985 *
2986 *==========================================================================*/
2987void QCamera3HardwareInterface::hdrPlusPerfLock(
2988        mm_camera_super_buf_t *metadata_buf)
2989{
2990    if (NULL == metadata_buf) {
2991        LOGE("metadata_buf is NULL");
2992        return;
2993    }
2994    metadata_buffer_t *metadata =
2995            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2996    int32_t *p_frame_number_valid =
2997            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2998    uint32_t *p_frame_number =
2999            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3000
3001    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3002        LOGE("%s: Invalid metadata", __func__);
3003        return;
3004    }
3005
3006    //acquire perf lock for 5 sec after the last HDR frame is captured
3007    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3008        if ((p_frame_number != NULL) &&
3009                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3010            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3011        }
3012    }
3013
3014    //release lock after perf lock timer is expired. If lock is already released,
3015    //isTimerReset returns false
3016    if (m_perfLock.isTimerReset()) {
3017        mLastCustIntentFrmNum = -1;
3018        m_perfLock.lock_rel_timed();
3019    }
3020}
3021
3022/*===========================================================================
3023 * FUNCTION   : handleInputBufferWithLock
3024 *
3025 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3026 *
3027 * PARAMETERS : @frame_number: frame number of the input buffer
3028 *
3029 * RETURN     :
3030 *
3031 *==========================================================================*/
3032void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3033{
3034    ATRACE_CALL();
3035    pendingRequestIterator i = mPendingRequestsList.begin();
3036    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3037        i++;
3038    }
3039    if (i != mPendingRequestsList.end() && i->input_buffer) {
3040        //found the right request
3041        if (!i->shutter_notified) {
3042            CameraMetadata settings;
3043            camera3_notify_msg_t notify_msg;
3044            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3045            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3046            if(i->settings) {
3047                settings = i->settings;
3048                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3049                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3050                } else {
3051                    LOGE("No timestamp in input settings! Using current one.");
3052                }
3053            } else {
3054                LOGE("Input settings missing!");
3055            }
3056
3057            notify_msg.type = CAMERA3_MSG_SHUTTER;
3058            notify_msg.message.shutter.frame_number = frame_number;
3059            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3060            mCallbackOps->notify(mCallbackOps, &notify_msg);
3061            i->shutter_notified = true;
3062            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3063                        i->frame_number, notify_msg.message.shutter.timestamp);
3064        }
3065
3066        if (i->input_buffer->release_fence != -1) {
3067           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3068           close(i->input_buffer->release_fence);
3069           if (rc != OK) {
3070               LOGE("input buffer sync wait failed %d", rc);
3071           }
3072        }
3073
3074        camera3_capture_result result;
3075        memset(&result, 0, sizeof(camera3_capture_result));
3076        result.frame_number = frame_number;
3077        result.result = i->settings;
3078        result.input_buffer = i->input_buffer;
3079        result.partial_result = PARTIAL_RESULT_COUNT;
3080
3081        mCallbackOps->process_capture_result(mCallbackOps, &result);
3082        LOGD("Input request metadata and input buffer frame_number = %u",
3083                        i->frame_number);
3084        i = erasePendingRequest(i);
3085    } else {
3086        LOGE("Could not find input request for frame number %d", frame_number);
3087    }
3088}
3089
3090/*===========================================================================
3091 * FUNCTION   : handleBufferWithLock
3092 *
3093 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3094 *
3095 * PARAMETERS : @buffer: image buffer for the callback
3096 *              @frame_number: frame number of the image buffer
3097 *
3098 * RETURN     :
3099 *
3100 *==========================================================================*/
3101void QCamera3HardwareInterface::handleBufferWithLock(
3102    camera3_stream_buffer_t *buffer, uint32_t frame_number)
3103{
3104    ATRACE_CALL();
3105    /* Nothing to be done during error state */
3106    if ((ERROR == mState) || (DEINIT == mState)) {
3107        return;
3108    }
3109    if (mFlushPerf) {
3110        handleBuffersDuringFlushLock(buffer);
3111        return;
3112    }
3113    //not in flush
3114    // If the frame number doesn't exist in the pending request list,
3115    // directly send the buffer to the frameworks, and update pending buffers map
3116    // Otherwise, book-keep the buffer.
3117    pendingRequestIterator i = mPendingRequestsList.begin();
3118    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3119        i++;
3120    }
3121    if (i == mPendingRequestsList.end()) {
3122        // Verify all pending requests frame_numbers are greater
3123        for (pendingRequestIterator j = mPendingRequestsList.begin();
3124                j != mPendingRequestsList.end(); j++) {
3125            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3126                LOGW("Error: pending live frame number %d is smaller than %d",
3127                         j->frame_number, frame_number);
3128            }
3129        }
3130        camera3_capture_result_t result;
3131        memset(&result, 0, sizeof(camera3_capture_result_t));
3132        result.result = NULL;
3133        result.frame_number = frame_number;
3134        result.num_output_buffers = 1;
3135        result.partial_result = 0;
3136        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3137                m != mPendingFrameDropList.end(); m++) {
3138            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3139            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3140            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3141                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3142                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3143                         frame_number, streamID);
3144                m = mPendingFrameDropList.erase(m);
3145                break;
3146            }
3147        }
3148        result.output_buffers = buffer;
3149        LOGH("result frame_number = %d, buffer = %p",
3150                 frame_number, buffer->buffer);
3151
3152        mPendingBuffersMap.removeBuf(buffer->buffer);
3153
3154        mCallbackOps->process_capture_result(mCallbackOps, &result);
3155    } else {
3156        if (i->input_buffer) {
3157            CameraMetadata settings;
3158            camera3_notify_msg_t notify_msg;
3159            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3160            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3161            if(i->settings) {
3162                settings = i->settings;
3163                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3164                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3165                } else {
3166                    LOGW("No timestamp in input settings! Using current one.");
3167                }
3168            } else {
3169                LOGE("Input settings missing!");
3170            }
3171
3172            notify_msg.type = CAMERA3_MSG_SHUTTER;
3173            notify_msg.message.shutter.frame_number = frame_number;
3174            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3175
3176            if (i->input_buffer->release_fence != -1) {
3177               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3178               close(i->input_buffer->release_fence);
3179               if (rc != OK) {
3180                   LOGE("input buffer sync wait failed %d", rc);
3181               }
3182            }
3183            mPendingBuffersMap.removeBuf(buffer->buffer);
3184
3185            bool notifyNow = true;
3186            for (pendingRequestIterator j = mPendingRequestsList.begin();
3187                    j != mPendingRequestsList.end(); j++) {
3188                if (j->frame_number < frame_number) {
3189                    notifyNow = false;
3190                    break;
3191                }
3192            }
3193
3194            if (notifyNow) {
3195                camera3_capture_result result;
3196                memset(&result, 0, sizeof(camera3_capture_result));
3197                result.frame_number = frame_number;
3198                result.result = i->settings;
3199                result.input_buffer = i->input_buffer;
3200                result.num_output_buffers = 1;
3201                result.output_buffers = buffer;
3202                result.partial_result = PARTIAL_RESULT_COUNT;
3203
3204                mCallbackOps->notify(mCallbackOps, &notify_msg);
3205                mCallbackOps->process_capture_result(mCallbackOps, &result);
3206                LOGD("Notify reprocess now %d!", frame_number);
3207                i = erasePendingRequest(i);
3208            } else {
3209                // Cache reprocess result for later
3210                PendingReprocessResult pendingResult;
3211                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3212                pendingResult.notify_msg = notify_msg;
3213                pendingResult.buffer = *buffer;
3214                pendingResult.frame_number = frame_number;
3215                mPendingReprocessResultList.push_back(pendingResult);
3216                LOGD("Cache reprocess result %d!", frame_number);
3217            }
3218        } else {
3219            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3220                j != i->buffers.end(); j++) {
3221                if (j->stream == buffer->stream) {
3222                    if (j->buffer != NULL) {
3223                        LOGE("Error: buffer is already set");
3224                    } else {
3225                        j->buffer = (camera3_stream_buffer_t *)malloc(
3226                            sizeof(camera3_stream_buffer_t));
3227                        *(j->buffer) = *buffer;
3228                        LOGH("cache buffer %p at result frame_number %u",
3229                             buffer->buffer, frame_number);
3230                    }
3231                }
3232            }
3233        }
3234    }
3235}
3236
3237/*===========================================================================
3238 * FUNCTION   : unblockRequestIfNecessary
3239 *
3240 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3241 *              that mMutex is held when this function is called.
3242 *
3243 * PARAMETERS :
3244 *
3245 * RETURN     :
3246 *
3247 *==========================================================================*/
3248void QCamera3HardwareInterface::unblockRequestIfNecessary()
3249{
3250   // Unblock process_capture_request
3251   pthread_cond_signal(&mRequestCond);
3252}
3253
3254
3255/*===========================================================================
3256 * FUNCTION   : processCaptureRequest
3257 *
3258 * DESCRIPTION: process a capture request from camera service
3259 *
3260 * PARAMETERS :
3261 *   @request : request from framework to process
3262 *
3263 * RETURN     :
3264 *
3265 *==========================================================================*/
3266int QCamera3HardwareInterface::processCaptureRequest(
3267                    camera3_capture_request_t *request)
3268{
3269    ATRACE_CALL();
3270    int rc = NO_ERROR;
3271    int32_t request_id;
3272    CameraMetadata meta;
3273    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3274    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3275    bool isVidBufRequested = false;
3276    camera3_stream_buffer_t *pInputBuffer = NULL;
3277
3278    pthread_mutex_lock(&mMutex);
3279
3280    // Validate current state
3281    switch (mState) {
3282        case CONFIGURED:
3283        case STARTED:
3284            /* valid state */
3285            break;
3286
3287        case ERROR:
3288            pthread_mutex_unlock(&mMutex);
3289            handleCameraDeviceError();
3290            return -ENODEV;
3291
3292        default:
3293            LOGE("Invalid state %d", mState);
3294            pthread_mutex_unlock(&mMutex);
3295            return -ENODEV;
3296    }
3297
3298    rc = validateCaptureRequest(request);
3299    if (rc != NO_ERROR) {
3300        LOGE("incoming request is not valid");
3301        pthread_mutex_unlock(&mMutex);
3302        return rc;
3303    }
3304
3305    meta = request->settings;
3306
3307    // For first capture request, send capture intent, and
3308    // stream on all streams
3309    if (mState == CONFIGURED) {
3310        // send an unconfigure to the backend so that the isp
3311        // resources are deallocated
3312        if (!mFirstConfiguration) {
3313            cam_stream_size_info_t stream_config_info;
3314            int32_t hal_version = CAM_HAL_V3;
3315            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3316            stream_config_info.buffer_info.min_buffers =
3317                    MIN_INFLIGHT_REQUESTS;
3318            stream_config_info.buffer_info.max_buffers =
3319                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3320            clear_metadata_buffer(mParameters);
3321            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3322                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3323            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3324                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3325            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3326                    mParameters);
3327            if (rc < 0) {
3328                LOGE("set_parms for unconfigure failed");
3329                pthread_mutex_unlock(&mMutex);
3330                return rc;
3331            }
3332        }
3333        m_perfLock.lock_acq();
3334        /* get eis information for stream configuration */
3335        cam_is_type_t is_type;
3336        char is_type_value[PROPERTY_VALUE_MAX];
3337        property_get("persist.camera.is_type", is_type_value, "0");
3338        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3339
3340        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3341            int32_t hal_version = CAM_HAL_V3;
3342            uint8_t captureIntent =
3343                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3344            mCaptureIntent = captureIntent;
3345            clear_metadata_buffer(mParameters);
3346            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3347            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3348        }
3349
3350        //If EIS is enabled, turn it on for video
3351        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3352        int32_t vsMode;
3353        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3354        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3355            rc = BAD_VALUE;
3356        }
3357
3358        //IS type will be 0 unless EIS is supported. If EIS is supported
3359        //it could either be 1 or 4 depending on the stream and video size
3360        if (setEis) {
3361            if (!m_bEisSupportedSize) {
3362                is_type = IS_TYPE_DIS;
3363            } else {
3364                is_type = IS_TYPE_EIS_2_0;
3365            }
3366            mStreamConfigInfo.is_type = is_type;
3367        } else {
3368            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3369        }
3370
3371        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3372                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3373        int32_t tintless_value = 1;
3374        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3375                CAM_INTF_PARM_TINTLESS, tintless_value);
3376        //Disable CDS for HFR mode or if DIS/EIS is on.
3377        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3378        //after every configure_stream
3379        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3380                (m_bIsVideo)) {
3381            int32_t cds = CAM_CDS_MODE_OFF;
3382            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3383                    CAM_INTF_PARM_CDS_MODE, cds))
3384                LOGE("Failed to disable CDS for HFR mode");
3385
3386        }
3387        setMobicat();
3388
3389        /* Set fps and hfr mode while sending meta stream info so that sensor
3390         * can configure appropriate streaming mode */
3391        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3392        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3393            rc = setHalFpsRange(meta, mParameters);
3394            if (rc != NO_ERROR) {
3395                LOGE("setHalFpsRange failed");
3396            }
3397        }
3398        if (meta.exists(ANDROID_CONTROL_MODE)) {
3399            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3400            rc = extractSceneMode(meta, metaMode, mParameters);
3401            if (rc != NO_ERROR) {
3402                LOGE("extractSceneMode failed");
3403            }
3404        }
3405
3406        //TODO: validate the arguments, HSV scenemode should have only the
3407        //advertised fps ranges
3408
3409        /*set the capture intent, hal version, tintless, stream info,
3410         *and disenable parameters to the backend*/
3411        LOGD("set_parms META_STREAM_INFO " );
3412        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3413            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3414                    "Format:%d",
3415                    mStreamConfigInfo.type[i],
3416                    mStreamConfigInfo.stream_sizes[i].width,
3417                    mStreamConfigInfo.stream_sizes[i].height,
3418                    mStreamConfigInfo.postprocess_mask[i],
3419                    mStreamConfigInfo.format[i]);
3420        }
3421        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3422                    mParameters);
3423        if (rc < 0) {
3424            LOGE("set_parms failed for hal version, stream info");
3425        }
3426
3427        cam_dimension_t sensor_dim;
3428        memset(&sensor_dim, 0, sizeof(sensor_dim));
3429        rc = getSensorOutputSize(sensor_dim);
3430        if (rc != NO_ERROR) {
3431            LOGE("Failed to get sensor output size");
3432            pthread_mutex_unlock(&mMutex);
3433            goto error_exit;
3434        }
3435
3436        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3437                gCamCapability[mCameraId]->active_array_size.height,
3438                sensor_dim.width, sensor_dim.height);
3439
3440        /* Set batchmode before initializing channel. Since registerBuffer
3441         * internally initializes some of the channels, better set batchmode
3442         * even before first register buffer */
3443        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3444            it != mStreamInfo.end(); it++) {
3445            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3446            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3447                    && mBatchSize) {
3448                rc = channel->setBatchSize(mBatchSize);
3449                //Disable per frame map unmap for HFR/batchmode case
3450                rc |= channel->setPerFrameMapUnmap(false);
3451                if (NO_ERROR != rc) {
3452                    LOGE("Channel init failed %d", rc);
3453                    pthread_mutex_unlock(&mMutex);
3454                    goto error_exit;
3455                }
3456            }
3457        }
3458
3459        //First initialize all streams
3460        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3461            it != mStreamInfo.end(); it++) {
3462            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3463            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3464               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3465               setEis)
3466                rc = channel->initialize(is_type);
3467            else {
3468                rc = channel->initialize(IS_TYPE_NONE);
3469            }
3470            if (NO_ERROR != rc) {
3471                LOGE("Channel initialization failed %d", rc);
3472                pthread_mutex_unlock(&mMutex);
3473                goto error_exit;
3474            }
3475        }
3476
3477        if (mRawDumpChannel) {
3478            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3479            if (rc != NO_ERROR) {
3480                LOGE("Error: Raw Dump Channel init failed");
3481                pthread_mutex_unlock(&mMutex);
3482                goto error_exit;
3483            }
3484        }
3485        if (mSupportChannel) {
3486            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3487            if (rc < 0) {
3488                LOGE("Support channel initialization failed");
3489                pthread_mutex_unlock(&mMutex);
3490                goto error_exit;
3491            }
3492        }
3493        if (mAnalysisChannel) {
3494            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3495            if (rc < 0) {
3496                LOGE("Analysis channel initialization failed");
3497                pthread_mutex_unlock(&mMutex);
3498                goto error_exit;
3499            }
3500        }
3501        if (mDummyBatchChannel) {
3502            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3503            if (rc < 0) {
3504                LOGE("mDummyBatchChannel setBatchSize failed");
3505                pthread_mutex_unlock(&mMutex);
3506                goto error_exit;
3507            }
3508            rc = mDummyBatchChannel->initialize(is_type);
3509            if (rc < 0) {
3510                LOGE("mDummyBatchChannel initialization failed");
3511                pthread_mutex_unlock(&mMutex);
3512                goto error_exit;
3513            }
3514        }
3515
3516        // Set bundle info
3517        rc = setBundleInfo();
3518        if (rc < 0) {
3519            LOGE("setBundleInfo failed %d", rc);
3520            pthread_mutex_unlock(&mMutex);
3521            goto error_exit;
3522        }
3523
3524        //Then start them.
3525        LOGH("Start META Channel");
3526        rc = mMetadataChannel->start();
3527        if (rc < 0) {
3528            LOGE("META channel start failed");
3529            pthread_mutex_unlock(&mMutex);
3530            goto error_exit;
3531        }
3532
3533        if (mAnalysisChannel) {
3534            rc = mAnalysisChannel->start();
3535            if (rc < 0) {
3536                LOGE("Analysis channel start failed");
3537                mMetadataChannel->stop();
3538                pthread_mutex_unlock(&mMutex);
3539                goto error_exit;
3540            }
3541        }
3542
3543        if (mSupportChannel) {
3544            rc = mSupportChannel->start();
3545            if (rc < 0) {
3546                LOGE("Support channel start failed");
3547                mMetadataChannel->stop();
3548                /* Although support and analysis are mutually exclusive today
3549                   adding it in anycase for future proofing */
3550                if (mAnalysisChannel) {
3551                    mAnalysisChannel->stop();
3552                }
3553                pthread_mutex_unlock(&mMutex);
3554                goto error_exit;
3555            }
3556        }
3557        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3558            it != mStreamInfo.end(); it++) {
3559            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3560            LOGH("Start Processing Channel mask=%d",
3561                     channel->getStreamTypeMask());
3562            rc = channel->start();
3563            if (rc < 0) {
3564                LOGE("channel start failed");
3565                pthread_mutex_unlock(&mMutex);
3566                goto error_exit;
3567            }
3568        }
3569
3570        if (mRawDumpChannel) {
3571            LOGD("Starting raw dump stream");
3572            rc = mRawDumpChannel->start();
3573            if (rc != NO_ERROR) {
3574                LOGE("Error Starting Raw Dump Channel");
3575                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3576                      it != mStreamInfo.end(); it++) {
3577                    QCamera3Channel *channel =
3578                        (QCamera3Channel *)(*it)->stream->priv;
3579                    LOGH("Stopping Processing Channel mask=%d",
3580                        channel->getStreamTypeMask());
3581                    channel->stop();
3582                }
3583                if (mSupportChannel)
3584                    mSupportChannel->stop();
3585                if (mAnalysisChannel) {
3586                    mAnalysisChannel->stop();
3587                }
3588                mMetadataChannel->stop();
3589                pthread_mutex_unlock(&mMutex);
3590                goto error_exit;
3591            }
3592        }
3593
3594        if (mChannelHandle) {
3595
3596            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3597                    mChannelHandle);
3598            if (rc != NO_ERROR) {
3599                LOGE("start_channel failed %d", rc);
3600                pthread_mutex_unlock(&mMutex);
3601                goto error_exit;
3602            }
3603        }
3604
3605
3606        goto no_error;
3607error_exit:
3608        m_perfLock.lock_rel();
3609        return rc;
3610no_error:
3611        m_perfLock.lock_rel();
3612
3613        mWokenUpByDaemon = false;
3614        mPendingLiveRequest = 0;
3615        mFirstConfiguration = false;
3616        enablePowerHint();
3617    }
3618
3619    uint32_t frameNumber = request->frame_number;
3620    cam_stream_ID_t streamID;
3621
3622    if (mFlushPerf) {
3623        //we cannot accept any requests during flush
3624        LOGE("process_capture_request cannot proceed during flush");
3625        pthread_mutex_unlock(&mMutex);
3626        return NO_ERROR; //should return an error
3627    }
3628
3629    if (meta.exists(ANDROID_REQUEST_ID)) {
3630        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3631        mCurrentRequestId = request_id;
3632        LOGD("Received request with id: %d", request_id);
3633    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3634        LOGE("Unable to find request id field, \
3635                & no previous id available");
3636        pthread_mutex_unlock(&mMutex);
3637        return NAME_NOT_FOUND;
3638    } else {
3639        LOGD("Re-using old request id");
3640        request_id = mCurrentRequestId;
3641    }
3642
3643    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3644                                    request->num_output_buffers,
3645                                    request->input_buffer,
3646                                    frameNumber);
3647    // Acquire all request buffers first
3648    streamID.num_streams = 0;
3649    int blob_request = 0;
3650    uint32_t snapshotStreamId = 0;
3651    for (size_t i = 0; i < request->num_output_buffers; i++) {
3652        const camera3_stream_buffer_t& output = request->output_buffers[i];
3653        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3654
3655        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3656            //Call function to store local copy of jpeg data for encode params.
3657            blob_request = 1;
3658            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3659        }
3660
3661        if (output.acquire_fence != -1) {
3662           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3663           close(output.acquire_fence);
3664           if (rc != OK) {
3665              LOGE("sync wait failed %d", rc);
3666              pthread_mutex_unlock(&mMutex);
3667              return rc;
3668           }
3669        }
3670
3671        streamID.streamID[streamID.num_streams] =
3672            channel->getStreamID(channel->getStreamTypeMask());
3673        streamID.num_streams++;
3674
3675        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3676            isVidBufRequested = true;
3677        }
3678    }
3679
3680    if (blob_request) {
3681        KPI_ATRACE_INT("SNAPSHOT", 1);
3682    }
3683    if (blob_request && mRawDumpChannel) {
3684        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3685        streamID.streamID[streamID.num_streams] =
3686            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3687        streamID.num_streams++;
3688    }
3689
3690    if(request->input_buffer == NULL) {
3691        /* Parse the settings:
3692         * - For every request in NORMAL MODE
3693         * - For every request in HFR mode during preview only case
3694         * - For first request of every batch in HFR mode during video
3695         * recording. In batchmode the same settings except frame number is
3696         * repeated in each request of the batch.
3697         */
3698        if (!mBatchSize ||
3699           (mBatchSize && !isVidBufRequested) ||
3700           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3701            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3702            if (rc < 0) {
3703                LOGE("fail to set frame parameters");
3704                pthread_mutex_unlock(&mMutex);
3705                return rc;
3706            }
3707        }
3708        /* For batchMode HFR, setFrameParameters is not called for every
3709         * request. But only frame number of the latest request is parsed.
3710         * Keep track of first and last frame numbers in a batch so that
3711         * metadata for the frame numbers of batch can be duplicated in
3712         * handleBatchMetadta */
3713        if (mBatchSize) {
3714            if (!mToBeQueuedVidBufs) {
3715                //start of the batch
3716                mFirstFrameNumberInBatch = request->frame_number;
3717            }
3718            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3719                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3720                LOGE("Failed to set the frame number in the parameters");
3721                return BAD_VALUE;
3722            }
3723        }
3724        if (mNeedSensorRestart) {
3725            /* Unlock the mutex as restartSensor waits on the channels to be
3726             * stopped, which in turn calls stream callback functions -
3727             * handleBufferWithLock and handleMetadataWithLock */
3728            pthread_mutex_unlock(&mMutex);
3729            rc = dynamicUpdateMetaStreamInfo();
3730            if (rc != NO_ERROR) {
3731                LOGE("Restarting the sensor failed");
3732                return BAD_VALUE;
3733            }
3734            mNeedSensorRestart = false;
3735            pthread_mutex_lock(&mMutex);
3736        }
3737    } else {
3738
3739        if (request->input_buffer->acquire_fence != -1) {
3740           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3741           close(request->input_buffer->acquire_fence);
3742           if (rc != OK) {
3743              LOGE("input buffer sync wait failed %d", rc);
3744              pthread_mutex_unlock(&mMutex);
3745              return rc;
3746           }
3747        }
3748    }
3749
3750    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3751        mLastCustIntentFrmNum = frameNumber;
3752    }
3753    /* Update pending request list and pending buffers map */
3754    PendingRequestInfo pendingRequest;
3755    pendingRequestIterator latestRequest;
3756    pendingRequest.frame_number = frameNumber;
3757    pendingRequest.num_buffers = request->num_output_buffers;
3758    pendingRequest.request_id = request_id;
3759    pendingRequest.blob_request = blob_request;
3760    pendingRequest.timestamp = 0;
3761    pendingRequest.bUrgentReceived = 0;
3762    if (request->input_buffer) {
3763        pendingRequest.input_buffer =
3764                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3765        *(pendingRequest.input_buffer) = *(request->input_buffer);
3766        pInputBuffer = pendingRequest.input_buffer;
3767    } else {
3768       pendingRequest.input_buffer = NULL;
3769       pInputBuffer = NULL;
3770    }
3771
3772    pendingRequest.pipeline_depth = 0;
3773    pendingRequest.partial_result_cnt = 0;
3774    extractJpegMetadata(mCurJpegMeta, request);
3775    pendingRequest.jpegMetadata = mCurJpegMeta;
3776    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3777    pendingRequest.shutter_notified = false;
3778
3779    //extract capture intent
3780    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3781        mCaptureIntent =
3782                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3783    }
3784    pendingRequest.capture_intent = mCaptureIntent;
3785
3786    //extract CAC info
3787    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
3788        mCacMode =
3789                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
3790    }
3791    pendingRequest.fwkCacMode = mCacMode;
3792
3793    PendingBuffersInRequest bufsForCurRequest;
3794    bufsForCurRequest.frame_number = frameNumber;
3795    // Mark current timestamp for the new request
3796    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
3797
3798    for (size_t i = 0; i < request->num_output_buffers; i++) {
3799        RequestedBufferInfo requestedBuf;
3800        memset(&requestedBuf, 0, sizeof(requestedBuf));
3801        requestedBuf.stream = request->output_buffers[i].stream;
3802        requestedBuf.buffer = NULL;
3803        pendingRequest.buffers.push_back(requestedBuf);
3804
3805        // Add to buffer handle the pending buffers list
3806        PendingBufferInfo bufferInfo;
3807        bufferInfo.buffer = request->output_buffers[i].buffer;
3808        bufferInfo.stream = request->output_buffers[i].stream;
3809        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
3810        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3811        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3812            frameNumber, bufferInfo.buffer,
3813            channel->getStreamTypeMask(), bufferInfo.stream->format);
3814    }
3815    // Add this request packet into mPendingBuffersMap
3816    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
3817    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
3818        mPendingBuffersMap.get_num_overall_buffers());
3819
3820    latestRequest = mPendingRequestsList.insert(
3821            mPendingRequestsList.end(), pendingRequest);
3822    if(mFlush) {
3823        pthread_mutex_unlock(&mMutex);
3824        return NO_ERROR;
3825    }
3826
3827    // Notify metadata channel we receive a request
3828    mMetadataChannel->request(NULL, frameNumber);
3829
3830    if(request->input_buffer != NULL){
3831        LOGD("Input request, frame_number %d", frameNumber);
3832        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3833        if (NO_ERROR != rc) {
3834            LOGE("fail to set reproc parameters");
3835            pthread_mutex_unlock(&mMutex);
3836            return rc;
3837        }
3838    }
3839
3840    // Call request on other streams
3841    uint32_t streams_need_metadata = 0;
3842    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3843    for (size_t i = 0; i < request->num_output_buffers; i++) {
3844        const camera3_stream_buffer_t& output = request->output_buffers[i];
3845        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3846
3847        if (channel == NULL) {
3848            LOGW("invalid channel pointer for stream");
3849            continue;
3850        }
3851
3852        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3853            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
3854                      output.buffer, request->input_buffer, frameNumber);
3855            if(request->input_buffer != NULL){
3856                rc = channel->request(output.buffer, frameNumber,
3857                        pInputBuffer, &mReprocMeta);
3858                if (rc < 0) {
3859                    LOGE("Fail to request on picture channel");
3860                    pthread_mutex_unlock(&mMutex);
3861                    return rc;
3862                }
3863            } else {
3864                LOGD("snapshot request with buffer %p, frame_number %d",
3865                         output.buffer, frameNumber);
3866                if (!request->settings) {
3867                    rc = channel->request(output.buffer, frameNumber,
3868                            NULL, mPrevParameters);
3869                } else {
3870                    rc = channel->request(output.buffer, frameNumber,
3871                            NULL, mParameters);
3872                }
3873                if (rc < 0) {
3874                    LOGE("Fail to request on picture channel");
3875                    pthread_mutex_unlock(&mMutex);
3876                    return rc;
3877                }
3878                pendingBufferIter->need_metadata = true;
3879                streams_need_metadata++;
3880            }
3881        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3882            bool needMetadata = false;
3883
3884            if (m_perfLock.isPerfLockTimedAcquired()) {
3885                if (m_perfLock.isTimerReset())
3886                {
3887                    m_perfLock.lock_rel_timed();
3888                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3889                }
3890            } else {
3891                m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3892            }
3893
3894            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3895            rc = yuvChannel->request(output.buffer, frameNumber,
3896                    pInputBuffer,
3897                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3898            if (rc < 0) {
3899                LOGE("Fail to request on YUV channel");
3900                pthread_mutex_unlock(&mMutex);
3901                return rc;
3902            }
3903            pendingBufferIter->need_metadata = needMetadata;
3904            if (needMetadata)
3905                streams_need_metadata += 1;
3906            LOGD("calling YUV channel request, need_metadata is %d",
3907                     needMetadata);
3908        } else {
3909            LOGD("request with buffer %p, frame_number %d",
3910                  output.buffer, frameNumber);
3911            /* Set perf lock for API-2 zsl */
3912            if (IS_USAGE_ZSL(output.stream->usage)) {
3913                if (m_perfLock.isPerfLockTimedAcquired()) {
3914                    if (m_perfLock.isTimerReset())
3915                    {
3916                        m_perfLock.lock_rel_timed();
3917                        m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3918                    }
3919                } else {
3920                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
3921                }
3922            }
3923
3924            rc = channel->request(output.buffer, frameNumber);
3925            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3926                    && mBatchSize) {
3927                mToBeQueuedVidBufs++;
3928                if (mToBeQueuedVidBufs == mBatchSize) {
3929                    channel->queueBatchBuf();
3930                }
3931            }
3932            if (rc < 0) {
3933                LOGE("request failed");
3934                pthread_mutex_unlock(&mMutex);
3935                return rc;
3936            }
3937        }
3938        pendingBufferIter++;
3939    }
3940
3941    //If 2 streams have need_metadata set to true, fail the request, unless
3942    //we copy/reference count the metadata buffer
3943    if (streams_need_metadata > 1) {
3944        LOGE("not supporting request in which two streams requires"
3945                " 2 HAL metadata for reprocessing");
3946        pthread_mutex_unlock(&mMutex);
3947        return -EINVAL;
3948    }
3949
3950    if(request->input_buffer == NULL) {
3951        /* Set the parameters to backend:
3952         * - For every request in NORMAL MODE
3953         * - For every request in HFR mode during preview only case
3954         * - Once every batch in HFR mode during video recording
3955         */
3956        if (!mBatchSize ||
3957           (mBatchSize && !isVidBufRequested) ||
3958           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3959            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3960                     mBatchSize, isVidBufRequested,
3961                    mToBeQueuedVidBufs);
3962            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3963                    mParameters);
3964            if (rc < 0) {
3965                LOGE("set_parms failed");
3966            }
3967            /* reset to zero coz, the batch is queued */
3968            mToBeQueuedVidBufs = 0;
3969            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3970        }
3971        mPendingLiveRequest++;
3972    }
3973
3974    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3975
3976    mState = STARTED;
3977    // Added a timed condition wait
3978    struct timespec ts;
3979    uint8_t isValidTimeout = 1;
3980    rc = clock_gettime(CLOCK_REALTIME, &ts);
3981    if (rc < 0) {
3982      isValidTimeout = 0;
3983      LOGE("Error reading the real time clock!!");
3984    }
3985    else {
3986      // Make timeout as 5 sec for request to be honored
3987      ts.tv_sec += 5;
3988    }
3989    //Block on conditional variable
3990    if (mBatchSize) {
3991        /* For HFR, more buffers are dequeued upfront to improve the performance */
3992        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3993        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3994    }
3995    if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
3996        m_perfLock.lock_rel_timed();
3997
3998    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
3999            (mState != ERROR) && (mState != DEINIT)) {
4000        if (!isValidTimeout) {
4001            LOGD("Blocking on conditional wait");
4002            pthread_cond_wait(&mRequestCond, &mMutex);
4003        }
4004        else {
4005            LOGD("Blocking on timed conditional wait");
4006            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4007            if (rc == ETIMEDOUT) {
4008                rc = -ENODEV;
4009                LOGE("Unblocked on timeout!!!!");
4010                break;
4011            }
4012        }
4013        LOGD("Unblocked");
4014        if (mWokenUpByDaemon) {
4015            mWokenUpByDaemon = false;
4016            if (mPendingLiveRequest < maxInFlightRequests)
4017                break;
4018        }
4019    }
4020    pthread_mutex_unlock(&mMutex);
4021
4022    return rc;
4023}
4024
4025/*===========================================================================
4026 * FUNCTION   : dump
4027 *
4028 * DESCRIPTION:
4029 *
4030 * PARAMETERS :
4031 *
4032 *
4033 * RETURN     :
4034 *==========================================================================*/
4035void QCamera3HardwareInterface::dump(int fd)
4036{
4037    pthread_mutex_lock(&mMutex);
4038    dprintf(fd, "\n Camera HAL3 information Begin \n");
4039
4040    dprintf(fd, "\nNumber of pending requests: %zu \n",
4041        mPendingRequestsList.size());
4042    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4043    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4044    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4045    for(pendingRequestIterator i = mPendingRequestsList.begin();
4046            i != mPendingRequestsList.end(); i++) {
4047        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4048        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4049        i->input_buffer);
4050    }
4051    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4052                mPendingBuffersMap.get_num_overall_buffers());
4053    dprintf(fd, "-------+------------------\n");
4054    dprintf(fd, " Frame | Stream type mask \n");
4055    dprintf(fd, "-------+------------------\n");
4056    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4057        for(auto &j : req.mPendingBufferList) {
4058            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4059            dprintf(fd, " %5d | %11d \n",
4060                    req.frame_number, channel->getStreamTypeMask());
4061        }
4062    }
4063    dprintf(fd, "-------+------------------\n");
4064
4065    dprintf(fd, "\nPending frame drop list: %zu\n",
4066        mPendingFrameDropList.size());
4067    dprintf(fd, "-------+-----------\n");
4068    dprintf(fd, " Frame | Stream ID \n");
4069    dprintf(fd, "-------+-----------\n");
4070    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4071        i != mPendingFrameDropList.end(); i++) {
4072        dprintf(fd, " %5d | %9d \n",
4073            i->frame_number, i->stream_ID);
4074    }
4075    dprintf(fd, "-------+-----------\n");
4076
4077    dprintf(fd, "\n Camera HAL3 information End \n");
4078
4079    /* use dumpsys media.camera as trigger to send update debug level event */
4080    mUpdateDebugLevel = true;
4081    pthread_mutex_unlock(&mMutex);
4082    return;
4083}
4084
4085/*===========================================================================
4086 * FUNCTION   : flush
4087 *
4088 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4089 *              conditionally restarts channels
4090 *
4091 * PARAMETERS :
4092 *  @ restartChannels: re-start all channels
4093 *
4094 *
4095 * RETURN     :
4096 *          0 on success
4097 *          Error code on failure
4098 *==========================================================================*/
4099int QCamera3HardwareInterface::flush(bool restartChannels)
4100{
4101    KPI_ATRACE_CALL();
4102    int32_t rc = NO_ERROR;
4103
4104    LOGD("Unblocking Process Capture Request");
4105    pthread_mutex_lock(&mMutex);
4106    mFlush = true;
4107    pthread_mutex_unlock(&mMutex);
4108
4109    rc = stopAllChannels();
4110    if (rc < 0) {
4111        LOGE("stopAllChannels failed");
4112        return rc;
4113    }
4114    if (mChannelHandle) {
4115        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4116                mChannelHandle);
4117    }
4118
4119    // Reset bundle info
4120    rc = setBundleInfo();
4121    if (rc < 0) {
4122        LOGE("setBundleInfo failed %d", rc);
4123        return rc;
4124    }
4125
4126    // Mutex Lock
4127    pthread_mutex_lock(&mMutex);
4128
4129    // Unblock process_capture_request
4130    mPendingLiveRequest = 0;
4131    pthread_cond_signal(&mRequestCond);
4132
4133    rc = notifyErrorForPendingRequests();
4134    if (rc < 0) {
4135        LOGE("notifyErrorForPendingRequests failed");
4136        pthread_mutex_unlock(&mMutex);
4137        return rc;
4138    }
4139
4140    mFlush = false;
4141
4142    // Start the Streams/Channels
4143    if (restartChannels) {
4144        rc = startAllChannels();
4145        if (rc < 0) {
4146            LOGE("startAllChannels failed");
4147            pthread_mutex_unlock(&mMutex);
4148            return rc;
4149        }
4150    }
4151
4152    if (mChannelHandle) {
4153        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4154                    mChannelHandle);
4155        if (rc < 0) {
4156            LOGE("start_channel failed");
4157            pthread_mutex_unlock(&mMutex);
4158            return rc;
4159        }
4160    }
4161
4162    pthread_mutex_unlock(&mMutex);
4163
4164    return 0;
4165}
4166
4167/*===========================================================================
4168 * FUNCTION   : flushPerf
4169 *
4170 * DESCRIPTION: This is the performance optimization version of flush that does
4171 *              not use stream off, rather flushes the system
4172 *
4173 * PARAMETERS :
4174 *
4175 *
4176 * RETURN     : 0 : success
4177 *              -EINVAL: input is malformed (device is not valid)
4178 *              -ENODEV: if the device has encountered a serious error
4179 *==========================================================================*/
4180int QCamera3HardwareInterface::flushPerf()
4181{
4182    ATRACE_CALL();
4183    int32_t rc = 0;
4184    struct timespec timeout;
4185    bool timed_wait = false;
4186
4187    pthread_mutex_lock(&mMutex);
4188    mFlushPerf = true;
4189    mPendingBuffersMap.numPendingBufsAtFlush =
4190        mPendingBuffersMap.get_num_overall_buffers();
4191    LOGD("Calling flush. Wait for %d buffers to return",
4192        mPendingBuffersMap.numPendingBufsAtFlush);
4193
4194    /* send the flush event to the backend */
4195    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4196    if (rc < 0) {
4197        LOGE("Error in flush: IOCTL failure");
4198        mFlushPerf = false;
4199        pthread_mutex_unlock(&mMutex);
4200        return -ENODEV;
4201    }
4202
4203    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
4204        LOGD("No pending buffers in HAL, return flush");
4205        mFlushPerf = false;
4206        pthread_mutex_unlock(&mMutex);
4207        return rc;
4208    }
4209
4210    /* wait on a signal that buffers were received */
4211    rc = clock_gettime(CLOCK_REALTIME, &timeout);
4212    if (rc < 0) {
4213        LOGE("Error reading the real time clock, cannot use timed wait");
4214    } else {
4215        timeout.tv_sec += FLUSH_TIMEOUT;
4216        timed_wait = true;
4217    }
4218
4219    //Block on conditional variable
4220    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
4221        LOGD("Waiting on mBuffersCond");
4222        if (!timed_wait) {
4223            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4224            if (rc != 0) {
4225                 LOGE("pthread_cond_wait failed due to rc = %s",
4226                        strerror(rc));
4227                 break;
4228            }
4229        } else {
4230            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4231            if (rc != 0) {
4232                LOGE("pthread_cond_timedwait failed due to rc = %s",
4233                            strerror(rc));
4234                break;
4235            }
4236        }
4237    }
4238    if (rc != 0) {
4239        mFlushPerf = false;
4240        pthread_mutex_unlock(&mMutex);
4241        return -ENODEV;
4242    }
4243
4244    LOGD("Received buffers, now safe to return them");
4245
4246    //make sure the channels handle flush
4247    //currently only required for the picture channel to release snapshot resources
4248    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4249            it != mStreamInfo.end(); it++) {
4250        QCamera3Channel *channel = (*it)->channel;
4251        if (channel) {
4252            rc = channel->flush();
4253            if (rc) {
4254               LOGE("Flushing the channels failed with error %d", rc);
4255               // even though the channel flush failed we need to continue and
4256               // return the buffers we have to the framework, however the return
4257               // value will be an error
4258               rc = -ENODEV;
4259            }
4260        }
4261    }
4262
4263    /* notify the frameworks and send errored results */
4264    rc = notifyErrorForPendingRequests();
4265    if (rc < 0) {
4266        LOGE("notifyErrorForPendingRequests failed");
4267        pthread_mutex_unlock(&mMutex);
4268        return rc;
4269    }
4270
4271    //unblock process_capture_request
4272    mPendingLiveRequest = 0;
4273    unblockRequestIfNecessary();
4274
4275    mFlushPerf = false;
4276    pthread_mutex_unlock(&mMutex);
4277    LOGD ("Flush Operation complete. rc = %d", rc);
4278    return rc;
4279}
4280
4281/*===========================================================================
4282 * FUNCTION   : handleCameraDeviceError
4283 *
4284 * DESCRIPTION: This function calls internal flush and notifies the error to
4285 *              framework and updates the state variable.
4286 *
4287 * PARAMETERS : None
4288 *
4289 * RETURN     : NO_ERROR on Success
4290 *              Error code on failure
4291 *==========================================================================*/
4292int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4293{
4294    int32_t rc = NO_ERROR;
4295
4296    pthread_mutex_lock(&mMutex);
4297    if (mState != ERROR) {
4298        //if mState != ERROR, nothing to be done
4299        pthread_mutex_unlock(&mMutex);
4300        return NO_ERROR;
4301    }
4302    pthread_mutex_unlock(&mMutex);
4303
4304    rc = flush(false /* restart channels */);
4305    if (NO_ERROR != rc) {
4306        LOGE("internal flush to handle mState = ERROR failed");
4307    }
4308
4309    pthread_mutex_lock(&mMutex);
4310    mState = DEINIT;
4311    pthread_mutex_unlock(&mMutex);
4312
4313    camera3_notify_msg_t notify_msg;
4314    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4315    notify_msg.type = CAMERA3_MSG_ERROR;
4316    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4317    notify_msg.message.error.error_stream = NULL;
4318    notify_msg.message.error.frame_number = 0;
4319    mCallbackOps->notify(mCallbackOps, &notify_msg);
4320
4321    return rc;
4322}
4323
4324/*===========================================================================
4325 * FUNCTION   : captureResultCb
4326 *
4327 * DESCRIPTION: Callback handler for all capture result
4328 *              (streams, as well as metadata)
4329 *
4330 * PARAMETERS :
4331 *   @metadata : metadata information
4332 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4333 *               NULL if metadata.
4334 *
4335 * RETURN     : NONE
4336 *==========================================================================*/
4337void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4338                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4339{
4340    if (metadata_buf) {
4341        if (mBatchSize) {
4342            handleBatchMetadata(metadata_buf,
4343                    true /* free_and_bufdone_meta_buf */);
4344        } else { /* mBatchSize = 0 */
4345            hdrPlusPerfLock(metadata_buf);
4346            pthread_mutex_lock(&mMutex);
4347            handleMetadataWithLock(metadata_buf,
4348                    true /* free_and_bufdone_meta_buf */);
4349            pthread_mutex_unlock(&mMutex);
4350        }
4351    } else if (isInputBuffer) {
4352        pthread_mutex_lock(&mMutex);
4353        handleInputBufferWithLock(frame_number);
4354        pthread_mutex_unlock(&mMutex);
4355    } else {
4356        pthread_mutex_lock(&mMutex);
4357        handleBufferWithLock(buffer, frame_number);
4358        pthread_mutex_unlock(&mMutex);
4359    }
4360    return;
4361}
4362
4363/*===========================================================================
4364 * FUNCTION   : getReprocessibleOutputStreamId
4365 *
4366 * DESCRIPTION: Get source output stream id for the input reprocess stream
4367 *              based on size and format, which would be the largest
4368 *              output stream if an input stream exists.
4369 *
4370 * PARAMETERS :
4371 *   @id      : return the stream id if found
4372 *
4373 * RETURN     : int32_t type of status
4374 *              NO_ERROR  -- success
4375 *              none-zero failure code
4376 *==========================================================================*/
4377int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4378{
4379    /* check if any output or bidirectional stream with the same size and format
4380       and return that stream */
4381    if ((mInputStreamInfo.dim.width > 0) &&
4382            (mInputStreamInfo.dim.height > 0)) {
4383        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4384                it != mStreamInfo.end(); it++) {
4385
4386            camera3_stream_t *stream = (*it)->stream;
4387            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4388                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4389                    (stream->format == mInputStreamInfo.format)) {
4390                // Usage flag for an input stream and the source output stream
4391                // may be different.
4392                LOGD("Found reprocessible output stream! %p", *it);
4393                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4394                         stream->usage, mInputStreamInfo.usage);
4395
4396                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4397                if (channel != NULL && channel->mStreams[0]) {
4398                    id = channel->mStreams[0]->getMyServerID();
4399                    return NO_ERROR;
4400                }
4401            }
4402        }
4403    } else {
4404        LOGD("No input stream, so no reprocessible output stream");
4405    }
4406    return NAME_NOT_FOUND;
4407}
4408
4409/*===========================================================================
4410 * FUNCTION   : lookupFwkName
4411 *
4412 * DESCRIPTION: In case the enum is not same in fwk and backend
4413 *              make sure the parameter is correctly propogated
4414 *
4415 * PARAMETERS  :
4416 *   @arr      : map between the two enums
4417 *   @len      : len of the map
4418 *   @hal_name : name of the hal_parm to map
4419 *
4420 * RETURN     : int type of status
4421 *              fwk_name  -- success
4422 *              none-zero failure code
4423 *==========================================================================*/
4424template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4425        size_t len, halType hal_name)
4426{
4427
4428    for (size_t i = 0; i < len; i++) {
4429        if (arr[i].hal_name == hal_name) {
4430            return arr[i].fwk_name;
4431        }
4432    }
4433
4434    /* Not able to find matching framework type is not necessarily
4435     * an error case. This happens when mm-camera supports more attributes
4436     * than the frameworks do */
4437    LOGH("Cannot find matching framework type");
4438    return NAME_NOT_FOUND;
4439}
4440
4441/*===========================================================================
4442 * FUNCTION   : lookupHalName
4443 *
4444 * DESCRIPTION: In case the enum is not same in fwk and backend
4445 *              make sure the parameter is correctly propogated
4446 *
4447 * PARAMETERS  :
4448 *   @arr      : map between the two enums
4449 *   @len      : len of the map
4450 *   @fwk_name : name of the hal_parm to map
4451 *
4452 * RETURN     : int32_t type of status
4453 *              hal_name  -- success
4454 *              none-zero failure code
4455 *==========================================================================*/
4456template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4457        size_t len, fwkType fwk_name)
4458{
4459    for (size_t i = 0; i < len; i++) {
4460        if (arr[i].fwk_name == fwk_name) {
4461            return arr[i].hal_name;
4462        }
4463    }
4464
4465    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4466    return NAME_NOT_FOUND;
4467}
4468
4469/*===========================================================================
4470 * FUNCTION   : lookupProp
4471 *
4472 * DESCRIPTION: lookup a value by its name
4473 *
4474 * PARAMETERS :
4475 *   @arr     : map between the two enums
4476 *   @len     : size of the map
4477 *   @name    : name to be looked up
4478 *
4479 * RETURN     : Value if found
4480 *              CAM_CDS_MODE_MAX if not found
4481 *==========================================================================*/
4482template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4483        size_t len, const char *name)
4484{
4485    if (name) {
4486        for (size_t i = 0; i < len; i++) {
4487            if (!strcmp(arr[i].desc, name)) {
4488                return arr[i].val;
4489            }
4490        }
4491    }
4492    return CAM_CDS_MODE_MAX;
4493}
4494
4495/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata received in the HAL callback into the
 *              framework camera_metadata_t format
4498 *
4499 * PARAMETERS :
4500 *   @metadata : metadata information from callback
4501 *   @timestamp: metadata buffer timestamp
4502 *   @request_id: request id
4503 *   @jpegMetadata: additional jpeg metadata
4504 *   @pprocDone: whether internal offline postprocsesing is done
4505 *
4506 * RETURN     : camera_metadata_t*
4507 *              metadata in a format specified by fwk
4508 *==========================================================================*/
4509camera_metadata_t*
4510QCamera3HardwareInterface::translateFromHalMetadata(
4511                                 metadata_buffer_t *metadata,
4512                                 nsecs_t timestamp,
4513                                 int32_t request_id,
4514                                 const CameraMetadata& jpegMetadata,
4515                                 uint8_t pipeline_depth,
4516                                 uint8_t capture_intent,
4517                                 bool pprocDone,
4518                                 uint8_t fwk_cacMode)
4519{
4520    CameraMetadata camMetadata;
4521    camera_metadata_t *resultMetadata;
4522
4523    if (jpegMetadata.entryCount())
4524        camMetadata.append(jpegMetadata);
4525
4526    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4527    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4528    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4529    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4530
4531    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4532        int64_t fwk_frame_number = *frame_number;
4533        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4534    }
4535
4536    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4537        int32_t fps_range[2];
4538        fps_range[0] = (int32_t)float_range->min_fps;
4539        fps_range[1] = (int32_t)float_range->max_fps;
4540        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4541                                      fps_range, 2);
4542        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4543             fps_range[0], fps_range[1]);
4544    }
4545
4546    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4547        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4548    }
4549
4550    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4551        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4552                METADATA_MAP_SIZE(SCENE_MODES_MAP),
4553                *sceneMode);
4554        if (NAME_NOT_FOUND != val) {
4555            uint8_t fwkSceneMode = (uint8_t)val;
4556            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4557            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4558                     fwkSceneMode);
4559        }
4560    }
4561
4562    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4563        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4564        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4565    }
4566
4567    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4568        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4569        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4570    }
4571
4572    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4573        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4574        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4575    }
4576
4577    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4578            CAM_INTF_META_EDGE_MODE, metadata) {
4579        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4580    }
4581
4582    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4583        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4584        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4585    }
4586
4587    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4588        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4589    }
4590
4591    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4592        if (0 <= *flashState) {
4593            uint8_t fwk_flashState = (uint8_t) *flashState;
4594            if (!gCamCapability[mCameraId]->flash_available) {
4595                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4596            }
4597            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4598        }
4599    }
4600
4601    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4602        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4603        if (NAME_NOT_FOUND != val) {
4604            uint8_t fwk_flashMode = (uint8_t)val;
4605            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4606        }
4607    }
4608
4609    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4610        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4611        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4612    }
4613
4614    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4615        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4616    }
4617
4618    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4619        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4620    }
4621
4622    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4623        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4624    }
4625
4626    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4627        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4628        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4629    }
4630
4631    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4632        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4633        LOGD("fwk_videoStab = %d", fwk_videoStab);
4634        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4635    } else {
4636        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
4637        // and so hardcoding the Video Stab result to OFF mode.
4638        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4639        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
4640        LOGD("%s: EIS result default to OFF mode", __func__);
4641    }
4642
4643    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4644        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4645        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4646    }
4647
4648    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4649        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4650    }
4651
4652    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4653        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4654
4655        LOGD("dynamicblackLevel = %f %f %f %f",
4656          blackLevelSourcePattern->cam_black_level[0],
4657          blackLevelSourcePattern->cam_black_level[1],
4658          blackLevelSourcePattern->cam_black_level[2],
4659          blackLevelSourcePattern->cam_black_level[3]);
4660    }
4661
4662    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4663        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4664        float fwk_blackLevelInd[4];
4665
4666        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4667        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4668        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4669        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4670
4671        LOGD("applied dynamicblackLevel = %f %f %f %f",
4672          blackLevelAppliedPattern->cam_black_level[0],
4673          blackLevelAppliedPattern->cam_black_level[1],
4674          blackLevelAppliedPattern->cam_black_level[2],
4675          blackLevelAppliedPattern->cam_black_level[3]);
4676        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4677
4678        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
4679        // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4680        // depth space.
4681        fwk_blackLevelInd[0] /= 64.0;
4682        fwk_blackLevelInd[1] /= 64.0;
4683        fwk_blackLevelInd[2] /= 64.0;
4684        fwk_blackLevelInd[3] /= 64.0;
4685        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4686    }
4687
4688    // Fixed whitelevel is used by ISP/Sensor
4689    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4690            &gCamCapability[mCameraId]->white_level, 1);
4691
4692    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4693            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4694        int32_t scalerCropRegion[4];
4695        scalerCropRegion[0] = hScalerCropRegion->left;
4696        scalerCropRegion[1] = hScalerCropRegion->top;
4697        scalerCropRegion[2] = hScalerCropRegion->width;
4698        scalerCropRegion[3] = hScalerCropRegion->height;
4699
4700        // Adjust crop region from sensor output coordinate system to active
4701        // array coordinate system.
4702        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4703                scalerCropRegion[2], scalerCropRegion[3]);
4704
4705        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4706    }
4707
4708    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4709        LOGD("sensorExpTime = %lld", *sensorExpTime);
4710        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4711    }
4712
4713    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4714            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4715        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
4716        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4717    }
4718
4719    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4720            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4721        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
4722        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4723                sensorRollingShutterSkew, 1);
4724    }
4725
4726    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4727        LOGD("sensorSensitivity = %d", *sensorSensitivity);
4728        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4729
4730        //calculate the noise profile based on sensitivity
4731        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4732        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4733        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4734        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4735            noise_profile[i]   = noise_profile_S;
4736            noise_profile[i+1] = noise_profile_O;
4737        }
4738        LOGD("noise model entry (S, O) is (%f, %f)",
4739                noise_profile_S, noise_profile_O);
4740        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4741                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4742    }
4743
4744    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
4745        int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
4746        camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
4747    }
4748
4749    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4750        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4751        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4752    }
4753
4754    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4755        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4756                *faceDetectMode);
4757        if (NAME_NOT_FOUND != val) {
4758            uint8_t fwk_faceDetectMode = (uint8_t)val;
4759            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4760
4761            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4762                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4763                        CAM_INTF_META_FACE_DETECTION, metadata) {
4764                    uint8_t numFaces = MIN(
4765                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4766                    int32_t faceIds[MAX_ROI];
4767                    uint8_t faceScores[MAX_ROI];
4768                    int32_t faceRectangles[MAX_ROI * 4];
4769                    int32_t faceLandmarks[MAX_ROI * 6];
4770                    size_t j = 0, k = 0;
4771
4772                    for (size_t i = 0; i < numFaces; i++) {
4773                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4774                        // Adjust crop region from sensor output coordinate system to active
4775                        // array coordinate system.
4776                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4777                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4778                                rect.width, rect.height);
4779
4780                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4781                                faceRectangles+j, -1);
4782
4783                        j+= 4;
4784                    }
4785                    if (numFaces <= 0) {
4786                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4787                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4788                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4789                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4790                    }
4791
4792                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4793                            numFaces);
4794                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4795                            faceRectangles, numFaces * 4U);
4796                    if (fwk_faceDetectMode ==
4797                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4798                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
4799                                CAM_INTF_META_FACE_LANDMARK, metadata) {
4800
4801                            for (size_t i = 0; i < numFaces; i++) {
4802                                // Map the co-ordinate sensor output coordinate system to active
4803                                // array coordinate system.
4804                                mCropRegionMapper.toActiveArray(
4805                                        landmarks->face_landmarks[i].left_eye_center.x,
4806                                        landmarks->face_landmarks[i].left_eye_center.y);
4807                                mCropRegionMapper.toActiveArray(
4808                                        landmarks->face_landmarks[i].right_eye_center.x,
4809                                        landmarks->face_landmarks[i].right_eye_center.y);
4810                                mCropRegionMapper.toActiveArray(
4811                                        landmarks->face_landmarks[i].mouth_center.x,
4812                                        landmarks->face_landmarks[i].mouth_center.y);
4813
4814                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
4815                                k+= 6;
4816                            }
4817                        }
4818
4819                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4820                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4821                                faceLandmarks, numFaces * 6U);
4822                   }
4823                }
4824            }
4825        }
4826    }
4827
4828    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4829        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4830        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4831    }
4832
4833    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4834            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4835        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4836        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4837    }
4838
4839    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4840            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4841        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4842                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4843    }
4844
4845    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4846            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4847        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4848                CAM_MAX_SHADING_MAP_HEIGHT);
4849        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4850                CAM_MAX_SHADING_MAP_WIDTH);
4851        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4852                lensShadingMap->lens_shading, 4U * map_width * map_height);
4853    }
4854
4855    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4856        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4857        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4858    }
4859
4860    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4861        //Populate CAM_INTF_META_TONEMAP_CURVES
4862        /* ch0 = G, ch 1 = B, ch 2 = R*/
4863        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4864            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4865                     tonemap->tonemap_points_cnt,
4866                    CAM_MAX_TONEMAP_CURVE_SIZE);
4867            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4868        }
4869
4870        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4871                        &tonemap->curves[0].tonemap_points[0][0],
4872                        tonemap->tonemap_points_cnt * 2);
4873
4874        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4875                        &tonemap->curves[1].tonemap_points[0][0],
4876                        tonemap->tonemap_points_cnt * 2);
4877
4878        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4879                        &tonemap->curves[2].tonemap_points[0][0],
4880                        tonemap->tonemap_points_cnt * 2);
4881    }
4882
4883    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4884            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4885        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4886                CC_GAINS_COUNT);
4887    }
4888
4889    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4890            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4891        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4892                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4893                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4894    }
4895
4896    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4897            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4898        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4899            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
4900                     toneCurve->tonemap_points_cnt,
4901                    CAM_MAX_TONEMAP_CURVE_SIZE);
4902            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4903        }
4904        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4905                (float*)toneCurve->curve.tonemap_points,
4906                toneCurve->tonemap_points_cnt * 2);
4907    }
4908
4909    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4910            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4911        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4912                predColorCorrectionGains->gains, 4);
4913    }
4914
4915    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4916            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4917        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4918                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4919                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4920    }
4921
4922    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4923        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4924    }
4925
4926    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4927        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4928        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4929    }
4930
4931    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4932        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4933        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4934    }
4935
4936    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4937        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4938                *effectMode);
4939        if (NAME_NOT_FOUND != val) {
4940            uint8_t fwk_effectMode = (uint8_t)val;
4941            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4942        }
4943    }
4944
4945    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4946            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4947        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4948                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4949        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4950            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4951        }
4952        int32_t fwk_testPatternData[4];
4953        fwk_testPatternData[0] = testPatternData->r;
4954        fwk_testPatternData[3] = testPatternData->b;
4955        switch (gCamCapability[mCameraId]->color_arrangement) {
4956        case CAM_FILTER_ARRANGEMENT_RGGB:
4957        case CAM_FILTER_ARRANGEMENT_GRBG:
4958            fwk_testPatternData[1] = testPatternData->gr;
4959            fwk_testPatternData[2] = testPatternData->gb;
4960            break;
4961        case CAM_FILTER_ARRANGEMENT_GBRG:
4962        case CAM_FILTER_ARRANGEMENT_BGGR:
4963            fwk_testPatternData[2] = testPatternData->gr;
4964            fwk_testPatternData[1] = testPatternData->gb;
4965            break;
4966        default:
4967            LOGE("color arrangement %d is not supported",
4968                gCamCapability[mCameraId]->color_arrangement);
4969            break;
4970        }
4971        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4972    }
4973
4974    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4975        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4976    }
4977
4978    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4979        String8 str((const char *)gps_methods);
4980        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4981    }
4982
4983    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4984        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4985    }
4986
4987    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4988        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4989    }
4990
4991    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4992        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4993        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4994    }
4995
4996    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4997        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4998        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4999    }
5000
5001    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5002        int32_t fwk_thumb_size[2];
5003        fwk_thumb_size[0] = thumb_size->width;
5004        fwk_thumb_size[1] = thumb_size->height;
5005        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5006    }
5007
5008    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5009        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5010                privateData,
5011                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5012    }
5013
5014    if (metadata->is_tuning_params_valid) {
5015        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5016        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5017        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5018
5019
5020        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5021                sizeof(uint32_t));
5022        data += sizeof(uint32_t);
5023
5024        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5025                sizeof(uint32_t));
5026        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5027        data += sizeof(uint32_t);
5028
5029        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5030                sizeof(uint32_t));
5031        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5032        data += sizeof(uint32_t);
5033
5034        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5035                sizeof(uint32_t));
5036        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5037        data += sizeof(uint32_t);
5038
5039        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5040                sizeof(uint32_t));
5041        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5042        data += sizeof(uint32_t);
5043
5044        metadata->tuning_params.tuning_mod3_data_size = 0;
5045        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5046                sizeof(uint32_t));
5047        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5048        data += sizeof(uint32_t);
5049
5050        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5051                TUNING_SENSOR_DATA_MAX);
5052        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5053                count);
5054        data += count;
5055
5056        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5057                TUNING_VFE_DATA_MAX);
5058        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5059                count);
5060        data += count;
5061
5062        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5063                TUNING_CPP_DATA_MAX);
5064        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5065                count);
5066        data += count;
5067
5068        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5069                TUNING_CAC_DATA_MAX);
5070        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5071                count);
5072        data += count;
5073
5074        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5075                (int32_t *)(void *)tuning_meta_data_blob,
5076                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5077    }
5078
5079    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5080            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5081        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5082                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5083                NEUTRAL_COL_POINTS);
5084    }
5085
5086    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5087        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5088        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5089    }
5090
5091    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5092        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5093        // Adjust crop region from sensor output coordinate system to active
5094        // array coordinate system.
5095        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5096                hAeRegions->rect.width, hAeRegions->rect.height);
5097
5098        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5099        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5100                REGIONS_TUPLE_COUNT);
5101        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5102                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5103                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5104                hAeRegions->rect.height);
5105    }
5106
5107    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5108        uint8_t fwk_afState = (uint8_t) *afState;
5109        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5110        LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5111    }
5112
5113    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5114        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5115    }
5116
5117    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5118        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5119    }
5120
5121    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5122        uint8_t fwk_lensState = *lensState;
5123        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5124    }
5125
5126    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5127        /*af regions*/
5128        int32_t afRegions[REGIONS_TUPLE_COUNT];
5129        // Adjust crop region from sensor output coordinate system to active
5130        // array coordinate system.
5131        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5132                hAfRegions->rect.width, hAfRegions->rect.height);
5133
5134        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5135        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5136                REGIONS_TUPLE_COUNT);
5137        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5138                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5139                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5140                hAfRegions->rect.height);
5141    }
5142
5143    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5144        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5145                *hal_ab_mode);
5146        if (NAME_NOT_FOUND != val) {
5147            uint8_t fwk_ab_mode = (uint8_t)val;
5148            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5149        }
5150    }
5151
5152    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5153        int val = lookupFwkName(SCENE_MODES_MAP,
5154                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5155        if (NAME_NOT_FOUND != val) {
5156            uint8_t fwkBestshotMode = (uint8_t)val;
5157            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5158            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5159        } else {
5160            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5161        }
5162    }
5163
5164    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5165         uint8_t fwk_mode = (uint8_t) *mode;
5166         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5167    }
5168
5169    /* Constant metadata values to be update*/
5170    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5171    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5172
5173    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5174    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5175
5176    int32_t hotPixelMap[2];
5177    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5178
5179    // CDS
5180    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5181        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5182    }
5183
5184    // TNR
5185    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5186        uint8_t tnr_enable       = tnr->denoise_enable;
5187        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5188
5189        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5190        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5191    }
5192
5193    // Reprocess crop data
5194    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5195        uint8_t cnt = crop_data->num_of_streams;
5196        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5197            // mm-qcamera-daemon only posts crop_data for streams
5198            // not linked to pproc. So no valid crop metadata is not
5199            // necessarily an error case.
5200            LOGD("No valid crop metadata entries");
5201        } else {
5202            uint32_t reproc_stream_id;
5203            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5204                LOGD("No reprocessible stream found, ignore crop data");
5205            } else {
5206                int rc = NO_ERROR;
5207                Vector<int32_t> roi_map;
5208                int32_t *crop = new int32_t[cnt*4];
5209                if (NULL == crop) {
5210                   rc = NO_MEMORY;
5211                }
5212                if (NO_ERROR == rc) {
5213                    int32_t streams_found = 0;
5214                    for (size_t i = 0; i < cnt; i++) {
5215                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5216                            if (pprocDone) {
5217                                // HAL already does internal reprocessing,
5218                                // either via reprocessing before JPEG encoding,
5219                                // or offline postprocessing for pproc bypass case.
5220                                crop[0] = 0;
5221                                crop[1] = 0;
5222                                crop[2] = mInputStreamInfo.dim.width;
5223                                crop[3] = mInputStreamInfo.dim.height;
5224                            } else {
5225                                crop[0] = crop_data->crop_info[i].crop.left;
5226                                crop[1] = crop_data->crop_info[i].crop.top;
5227                                crop[2] = crop_data->crop_info[i].crop.width;
5228                                crop[3] = crop_data->crop_info[i].crop.height;
5229                            }
5230                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5231                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5232                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5233                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5234                            streams_found++;
5235                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5236                                    crop[0], crop[1], crop[2], crop[3]);
5237                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5238                                    crop_data->crop_info[i].roi_map.left,
5239                                    crop_data->crop_info[i].roi_map.top,
5240                                    crop_data->crop_info[i].roi_map.width,
5241                                    crop_data->crop_info[i].roi_map.height);
5242                            break;
5243
5244                       }
5245                    }
5246                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5247                            &streams_found, 1);
5248                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5249                            crop, (size_t)(streams_found * 4));
5250                    if (roi_map.array()) {
5251                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5252                                roi_map.array(), roi_map.size());
5253                    }
5254               }
5255               if (crop) {
5256                   delete [] crop;
5257               }
5258            }
5259        }
5260    }
5261
5262    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5263        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5264        // so hardcoding the CAC result to OFF mode.
5265        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5266        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5267    } else {
5268        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5269            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5270                    *cacMode);
5271            if (NAME_NOT_FOUND != val) {
5272                uint8_t resultCacMode = (uint8_t)val;
5273                // check whether CAC result from CB is equal to Framework set CAC mode
5274                // If not equal then set the CAC mode came in corresponding request
5275                if (fwk_cacMode != resultCacMode) {
5276                    resultCacMode = fwk_cacMode;
5277                }
5278                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5279                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5280            } else {
5281                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5282            }
5283        }
5284    }
5285
5286    // Post blob of cam_cds_data through vendor tag.
5287    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5288        uint8_t cnt = cdsInfo->num_of_streams;
5289        cam_cds_data_t cdsDataOverride;
5290        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5291        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5292        cdsDataOverride.num_of_streams = 1;
5293        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5294            uint32_t reproc_stream_id;
5295            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5296                LOGD("No reprocessible stream found, ignore cds data");
5297            } else {
5298                for (size_t i = 0; i < cnt; i++) {
5299                    if (cdsInfo->cds_info[i].stream_id ==
5300                            reproc_stream_id) {
5301                        cdsDataOverride.cds_info[0].cds_enable =
5302                                cdsInfo->cds_info[i].cds_enable;
5303                        break;
5304                    }
5305                }
5306            }
5307        } else {
5308            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5309        }
5310        camMetadata.update(QCAMERA3_CDS_INFO,
5311                (uint8_t *)&cdsDataOverride,
5312                sizeof(cam_cds_data_t));
5313    }
5314
5315    // Ldaf calibration data
5316    if (!mLdafCalibExist) {
5317        IF_META_AVAILABLE(uint32_t, ldafCalib,
5318                CAM_INTF_META_LDAF_EXIF, metadata) {
5319            mLdafCalibExist = true;
5320            mLdafCalib[0] = ldafCalib[0];
5321            mLdafCalib[1] = ldafCalib[1];
5322            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5323                    ldafCalib[0], ldafCalib[1]);
5324        }
5325    }
5326
5327    resultMetadata = camMetadata.release();
5328    return resultMetadata;
5329}
5330
5331/*===========================================================================
5332 * FUNCTION   : saveExifParams
5333 *
 * DESCRIPTION: Caches the per-module 3A (AE/AWB/AF/ASD/stats) EXIF debug
 *              data from the metadata callback into mExifParams.
5335 *
5336 * PARAMETERS :
5337 *   @metadata : metadata information from callback
5338 *
5339 * RETURN     : none
5340 *
5341 *==========================================================================*/
5342void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5343{
5344    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5345            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5346        if (mExifParams.debug_params) {
5347            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
5348            mExifParams.debug_params->ae_debug_params_valid = TRUE;
5349        }
5350    }
5351    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5352            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5353        if (mExifParams.debug_params) {
5354            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
5355            mExifParams.debug_params->awb_debug_params_valid = TRUE;
5356        }
5357    }
5358    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5359            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5360        if (mExifParams.debug_params) {
5361            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
5362            mExifParams.debug_params->af_debug_params_valid = TRUE;
5363        }
5364    }
5365    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5366            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5367        if (mExifParams.debug_params) {
5368            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
5369            mExifParams.debug_params->asd_debug_params_valid = TRUE;
5370        }
5371    }
5372    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5373            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5374        if (mExifParams.debug_params) {
5375            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
5376            mExifParams.debug_params->stats_debug_params_valid = TRUE;
5377        }
5378    }
5379}
5380
5381/*===========================================================================
5382 * FUNCTION   : get3AExifParams
5383 *
 * DESCRIPTION: Returns (by value) the cached 3A EXIF parameters most
 *              recently stored by saveExifParams().
5385 *
5386 * PARAMETERS : none
5387 *
5388 *
5389 * RETURN     : mm_jpeg_exif_params_t
5390 *
5391 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return a copy of the cached EXIF parameters. The debug blobs inside
    // (pointed to by debug_params) were last refreshed by saveExifParams().
    return mExifParams;
}
5396
5397/*===========================================================================
5398 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5399 *
 * DESCRIPTION: Translates the urgent (3A) subset of the HAL metadata
 *              callback — AE/AF/AWB states, triggers and modes — into
 *              framework result metadata for the partial result.
5401 *
5402 * PARAMETERS :
5403 *   @metadata : metadata information from callback
5404 *
5405 * RETURN     : camera_metadata_t*
5406 *              metadata in a format specified by fwk
5407 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Builds the "urgent" partial result: only the 3A (AE/AF/AWB) tags the
    // framework wants as early as possible. Caller owns the returned buffer
    // (released out of camMetadata below).
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state machine value, narrowed to the framework's uint8 enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // Echo the AE precapture trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state machine value.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Map the HAL focus mode enum to the framework AF mode; unmapped values
    // are logged and dropped rather than reported incorrectly.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // Echo the AF trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // Map the HAL white balance mode to the framework AWB mode.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE is not reported as a single HAL value; it is
    // reconstructed from three independent HAL parameters. The *_MAX / -1
    // defaults mean "not present in this metadata buffer".
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Branch precedence matters: red-eye reduction overrides flash mode,
    // which overrides plain AE on/off. Do not reorder.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three inputs produced a usable AE mode; no tag emitted.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5514
5515/*===========================================================================
5516 * FUNCTION   : dumpMetadataToFile
5517 *
5518 * DESCRIPTION: Dumps tuning metadata to file system
5519 *
5520 * PARAMETERS :
5521 *   @meta           : tuning metadata
5522 *   @dumpFrameCount : current dump frame count
5523 *   @enabled        : Enable mask
5524 *
5525 *==========================================================================*/
5526void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5527                                                   uint32_t &dumpFrameCount,
5528                                                   bool enabled,
5529                                                   const char *type,
5530                                                   uint32_t frameNumber)
5531{
5532    //Some sanity checks
5533    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5534        LOGE("Tuning sensor data size bigger than expected %d: %d",
5535              meta.tuning_sensor_data_size,
5536              TUNING_SENSOR_DATA_MAX);
5537        return;
5538    }
5539
5540    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5541        LOGE("Tuning VFE data size bigger than expected %d: %d",
5542              meta.tuning_vfe_data_size,
5543              TUNING_VFE_DATA_MAX);
5544        return;
5545    }
5546
5547    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5548        LOGE("Tuning CPP data size bigger than expected %d: %d",
5549              meta.tuning_cpp_data_size,
5550              TUNING_CPP_DATA_MAX);
5551        return;
5552    }
5553
5554    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5555        LOGE("Tuning CAC data size bigger than expected %d: %d",
5556              meta.tuning_cac_data_size,
5557              TUNING_CAC_DATA_MAX);
5558        return;
5559    }
5560    //
5561
5562    if(enabled){
5563        char timeBuf[FILENAME_MAX];
5564        char buf[FILENAME_MAX];
5565        memset(buf, 0, sizeof(buf));
5566        memset(timeBuf, 0, sizeof(timeBuf));
5567        time_t current_time;
5568        struct tm * timeinfo;
5569        time (&current_time);
5570        timeinfo = localtime (&current_time);
5571        if (timeinfo != NULL) {
5572            strftime (timeBuf, sizeof(timeBuf),
5573                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5574        }
5575        String8 filePath(timeBuf);
5576        snprintf(buf,
5577                sizeof(buf),
5578                "%dm_%s_%d.bin",
5579                dumpFrameCount,
5580                type,
5581                frameNumber);
5582        filePath.append(buf);
5583        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5584        if (file_fd >= 0) {
5585            ssize_t written_len = 0;
5586            meta.tuning_data_version = TUNING_DATA_VERSION;
5587            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5588            written_len += write(file_fd, data, sizeof(uint32_t));
5589            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5590            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5591            written_len += write(file_fd, data, sizeof(uint32_t));
5592            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5593            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5594            written_len += write(file_fd, data, sizeof(uint32_t));
5595            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5596            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5597            written_len += write(file_fd, data, sizeof(uint32_t));
5598            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5599            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5600            written_len += write(file_fd, data, sizeof(uint32_t));
5601            meta.tuning_mod3_data_size = 0;
5602            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5603            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5604            written_len += write(file_fd, data, sizeof(uint32_t));
5605            size_t total_size = meta.tuning_sensor_data_size;
5606            data = (void *)((uint8_t *)&meta.data);
5607            written_len += write(file_fd, data, total_size);
5608            total_size = meta.tuning_vfe_data_size;
5609            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5610            written_len += write(file_fd, data, total_size);
5611            total_size = meta.tuning_cpp_data_size;
5612            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5613            written_len += write(file_fd, data, total_size);
5614            total_size = meta.tuning_cac_data_size;
5615            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5616            written_len += write(file_fd, data, total_size);
5617            close(file_fd);
5618        }else {
5619            LOGE("fail to open file for metadata dumping");
5620        }
5621    }
5622}
5623
5624/*===========================================================================
5625 * FUNCTION   : cleanAndSortStreamInfo
5626 *
5627 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5628 *              and sort them such that raw stream is at the end of the list
5629 *              This is a workaround for camera daemon constraint.
5630 *
5631 * PARAMETERS : None
5632 *
5633 *==========================================================================*/
5634void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5635{
5636    List<stream_info_t *> newStreamInfo;
5637
5638    /*clean up invalid streams*/
5639    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5640            it != mStreamInfo.end();) {
5641        if(((*it)->status) == INVALID){
5642            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5643            delete channel;
5644            free(*it);
5645            it = mStreamInfo.erase(it);
5646        } else {
5647            it++;
5648        }
5649    }
5650
5651    // Move preview/video/callback/snapshot streams into newList
5652    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5653            it != mStreamInfo.end();) {
5654        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5655                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5656                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5657            newStreamInfo.push_back(*it);
5658            it = mStreamInfo.erase(it);
5659        } else
5660            it++;
5661    }
5662    // Move raw streams into newList
5663    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5664            it != mStreamInfo.end();) {
5665        newStreamInfo.push_back(*it);
5666        it = mStreamInfo.erase(it);
5667    }
5668
5669    mStreamInfo = newStreamInfo;
5670}
5671
5672/*===========================================================================
5673 * FUNCTION   : extractJpegMetadata
5674 *
5675 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5676 *              JPEG metadata is cached in HAL, and return as part of capture
5677 *              result when metadata is returned from camera daemon.
5678 *
5679 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5680 *              @request:      capture request
5681 *
5682 *==========================================================================*/
void QCamera3HardwareInterface::extractJpegMetadata(
        CameraMetadata& jpegMetadata,
        const camera3_capture_request_t *request)
{
    // Copy every JPEG-related tag present in the request settings into
    // jpegMetadata, so it can be returned with the capture result when the
    // daemon delivers the metadata. Tags absent from the request are skipped.
    CameraMetadata frame_settings;
    frame_settings = request->settings;

    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);

    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);

    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);

    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);

    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
        jpegMetadata.update(ANDROID_JPEG_QUALITY,
                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
                frame_settings.find(ANDROID_JPEG_QUALITY).count);

    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);

    // Thumbnail size is special-cased: for 90/270-degree JPEG orientation the
    // width/height are swapped before being stored.
    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        int32_t thumbnail_size[2];
        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
            int32_t orientation =
                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
            if ((orientation == 90) || (orientation == 270)) {
               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
               int32_t temp;
               temp = thumbnail_size[0];
               thumbnail_size[0] = thumbnail_size[1];
               thumbnail_size[1] = temp;
            }
         }
         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
                thumbnail_size,
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
    }

}
5741
5742/*===========================================================================
5743 * FUNCTION   : convertToRegions
5744 *
5745 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5746 *
5747 * PARAMETERS :
5748 *   @rect   : cam_rect_t struct to convert
5749 *   @region : int32_t destination array
5750 *   @weight : if we are converting from cam_area_t, weight is valid
5751 *             else weight = -1
5752 *
5753 *==========================================================================*/
5754void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5755        int32_t *region, int weight)
5756{
5757    region[0] = rect.left;
5758    region[1] = rect.top;
5759    region[2] = rect.left + rect.width;
5760    region[3] = rect.top + rect.height;
5761    if (weight > -1) {
5762        region[4] = weight;
5763    }
5764}
5765
5766/*===========================================================================
5767 * FUNCTION   : convertFromRegions
5768 *
5769 * DESCRIPTION: helper method to convert from array to cam_rect_t
5770 *
5771 * PARAMETERS :
 *   @roi      : cam_area_t destination populated from the metadata entry
 *   @settings : metadata buffer to read the region from
 *   @tag      : metadata tag holding [x_min, y_min, x_max, y_max, weight]
5776 *
5777 *==========================================================================*/
5778void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5779        const camera_metadata_t *settings, uint32_t tag)
5780{
5781    CameraMetadata frame_settings;
5782    frame_settings = settings;
5783    int32_t x_min = frame_settings.find(tag).data.i32[0];
5784    int32_t y_min = frame_settings.find(tag).data.i32[1];
5785    int32_t x_max = frame_settings.find(tag).data.i32[2];
5786    int32_t y_max = frame_settings.find(tag).data.i32[3];
5787    roi.weight = frame_settings.find(tag).data.i32[4];
5788    roi.rect.left = x_min;
5789    roi.rect.top = y_min;
5790    roi.rect.width = x_max - x_min;
5791    roi.rect.height = y_max - y_min;
5792}
5793
5794/*===========================================================================
5795 * FUNCTION   : resetIfNeededROI
5796 *
5797 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5798 *              crop region
5799 *
5800 * PARAMETERS :
5801 *   @roi       : cam_area_t struct to resize
5802 *   @scalerCropRegion : cam_crop_region_t region to compare against
5803 *
5804 *
5805 *==========================================================================*/
5806bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5807                                                 const cam_crop_region_t* scalerCropRegion)
5808{
5809    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5810    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5811    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5812    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5813
5814    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5815     * without having this check the calculations below to validate if the roi
5816     * is inside scalar crop region will fail resulting in the roi not being
5817     * reset causing algorithm to continue to use stale roi window
5818     */
5819    if (roi->weight == 0) {
5820        return true;
5821    }
5822
5823    if ((roi_x_max < scalerCropRegion->left) ||
5824        // right edge of roi window is left of scalar crop's left edge
5825        (roi_y_max < scalerCropRegion->top)  ||
5826        // bottom edge of roi window is above scalar crop's top edge
5827        (roi->rect.left > crop_x_max) ||
5828        // left edge of roi window is beyond(right) of scalar crop's right edge
5829        (roi->rect.top > crop_y_max)){
5830        // top edge of roi windo is above scalar crop's top edge
5831        return false;
5832    }
5833    if (roi->rect.left < scalerCropRegion->left) {
5834        roi->rect.left = scalerCropRegion->left;
5835    }
5836    if (roi->rect.top < scalerCropRegion->top) {
5837        roi->rect.top = scalerCropRegion->top;
5838    }
5839    if (roi_x_max > crop_x_max) {
5840        roi_x_max = crop_x_max;
5841    }
5842    if (roi_y_max > crop_y_max) {
5843        roi_y_max = crop_y_max;
5844    }
5845    roi->rect.width = roi_x_max - roi->rect.left;
5846    roi->rect.height = roi_y_max - roi->rect.top;
5847    return true;
5848}
5849
5850/*===========================================================================
5851 * FUNCTION   : convertLandmarks
5852 *
5853 * DESCRIPTION: helper method to extract the landmarks from face detection info
5854 *
5855 * PARAMETERS :
5856 *   @landmark_data : input landmark data to be converted
5857 *   @landmarks : int32_t destination array
5858 *
5859 *
5860 *==========================================================================*/
5861void QCamera3HardwareInterface::convertLandmarks(
5862        cam_face_landmarks_info_t landmark_data,
5863        int32_t *landmarks)
5864{
5865    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
5866    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
5867    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
5868    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
5869    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
5870    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
5871}
5872
// Convenience accessor: raw pointer to buffer INDEX inside a
// QCamera3HeapMemory object MEM_OBJ.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5874/*===========================================================================
5875 * FUNCTION   : initCapabilities
5876 *
5877 * DESCRIPTION: initialize camera capabilities in static data struct
5878 *
5879 * PARAMETERS :
5880 *   @cameraId  : camera Id
5881 *
5882 * RETURN     : int32_t type of status
5883 *              NO_ERROR  -- success
5884 *              none-zero failure code
5885 *==========================================================================*/
5886int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5887{
5888    int rc = 0;
5889    mm_camera_vtbl_t *cameraHandle = NULL;
5890    QCamera3HeapMemory *capabilityHeap = NULL;
5891
5892    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5893    if (rc) {
5894        LOGE("camera_open failed. rc = %d", rc);
5895        goto open_failed;
5896    }
5897    if (!cameraHandle) {
5898        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
5899        goto open_failed;
5900    }
5901
5902    capabilityHeap = new QCamera3HeapMemory(1);
5903    if (capabilityHeap == NULL) {
5904        LOGE("creation of capabilityHeap failed");
5905        goto heap_creation_failed;
5906    }
5907    /* Allocate memory for capability buffer */
5908    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5909    if(rc != OK) {
5910        LOGE("No memory for cappability");
5911        goto allocate_failed;
5912    }
5913
5914    /* Map memory for capability buffer */
5915    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5916    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5917                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5918                                capabilityHeap->getFd(0),
5919                                sizeof(cam_capability_t));
5920    if(rc < 0) {
5921        LOGE("failed to map capability buffer");
5922        goto map_failed;
5923    }
5924
5925    /* Query Capability */
5926    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5927    if(rc < 0) {
5928        LOGE("failed to query capability");
5929        goto query_failed;
5930    }
5931    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5932    if (!gCamCapability[cameraId]) {
5933        LOGE("out of memory");
5934        goto query_failed;
5935    }
5936    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5937                                        sizeof(cam_capability_t));
5938    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_x = 0;
5939    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_y = 0;
5940    rc = 0;
5941
5942query_failed:
5943    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5944                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5945map_failed:
5946    capabilityHeap->deallocate();
5947allocate_failed:
5948    delete capabilityHeap;
5949heap_creation_failed:
5950    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5951    cameraHandle = NULL;
5952open_failed:
5953    return rc;
5954}
5955
5956/*==========================================================================
 * FUNCTION   : get3AVersion
5958 *
5959 * DESCRIPTION: get the Q3A S/W version
5960 *
5961 * PARAMETERS :
5962 *  @sw_version: Reference of Q3A structure which will hold version info upon
5963 *               return
5964 *
5965 * RETURN     : None
5966 *
5967 *==========================================================================*/
5968void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5969{
5970    if(gCamCapability[mCameraId])
5971        sw_version = gCamCapability[mCameraId]->q3a_version;
5972    else
5973        LOGE("Capability structure NULL!");
5974}
5975
5976
5977/*===========================================================================
5978 * FUNCTION   : initParameters
5979 *
5980 * DESCRIPTION: initialize camera parameters
5981 *
5982 * PARAMETERS :
5983 *
5984 * RETURN     : int32_t type of status
5985 *              NO_ERROR  -- success
5986 *              none-zero failure code
5987 *==========================================================================*/
5988int QCamera3HardwareInterface::initParameters()
5989{
5990    int rc = 0;
5991
5992    //Allocate Set Param Buffer
5993    mParamHeap = new QCamera3HeapMemory(1);
5994    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5995    if(rc != OK) {
5996        rc = NO_MEMORY;
5997        LOGE("Failed to allocate SETPARM Heap memory");
5998        delete mParamHeap;
5999        mParamHeap = NULL;
6000        return rc;
6001    }
6002
6003    //Map memory for parameters buffer
6004    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6005            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6006            mParamHeap->getFd(0),
6007            sizeof(metadata_buffer_t));
6008    if(rc < 0) {
6009        LOGE("failed to map SETPARM buffer");
6010        rc = FAILED_TRANSACTION;
6011        mParamHeap->deallocate();
6012        delete mParamHeap;
6013        mParamHeap = NULL;
6014        return rc;
6015    }
6016
6017    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6018
6019    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6020    return rc;
6021}
6022
6023/*===========================================================================
6024 * FUNCTION   : deinitParameters
6025 *
6026 * DESCRIPTION: de-initialize camera parameters
6027 *
6028 * PARAMETERS :
6029 *
6030 * RETURN     : NONE
6031 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down in reverse order of initParameters(): unmap the buffer from
    // the backend first, then release the heap that backed the mapping.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's buffer and is now invalid.
    mParameters = NULL;

    free(mPrevParameters);
    mPrevParameters = NULL;
}
6046
6047/*===========================================================================
6048 * FUNCTION   : calcMaxJpegSize
6049 *
6050 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6051 *
6052 * PARAMETERS :
6053 *
6054 * RETURN     : max_jpeg_size
6055 *==========================================================================*/
6056size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6057{
6058    size_t max_jpeg_size = 0;
6059    size_t temp_width, temp_height;
6060    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6061            MAX_SIZES_CNT);
6062    for (size_t i = 0; i < count; i++) {
6063        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6064        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6065        if (temp_width * temp_height > max_jpeg_size ) {
6066            max_jpeg_size = temp_width * temp_height;
6067        }
6068    }
6069    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6070    return max_jpeg_size;
6071}
6072
6073/*===========================================================================
6074 * FUNCTION   : getMaxRawSize
6075 *
6076 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6077 *
6078 * PARAMETERS :
6079 *
6080 * RETURN     : Largest supported Raw Dimension
6081 *==========================================================================*/
6082cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6083{
6084    int max_width = 0;
6085    cam_dimension_t maxRawSize;
6086
6087    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6088    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6089        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6090            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6091            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6092        }
6093    }
6094    return maxRawSize;
6095}
6096
6097
6098/*===========================================================================
6099 * FUNCTION   : calcMaxJpegDim
6100 *
6101 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6102 *
6103 * PARAMETERS :
6104 *
6105 * RETURN     : max_jpeg_dim
6106 *==========================================================================*/
6107cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6108{
6109    cam_dimension_t max_jpeg_dim;
6110    cam_dimension_t curr_jpeg_dim;
6111    max_jpeg_dim.width = 0;
6112    max_jpeg_dim.height = 0;
6113    curr_jpeg_dim.width = 0;
6114    curr_jpeg_dim.height = 0;
6115    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6116        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6117        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6118        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6119            max_jpeg_dim.width * max_jpeg_dim.height ) {
6120            max_jpeg_dim.width = curr_jpeg_dim.width;
6121            max_jpeg_dim.height = curr_jpeg_dim.height;
6122        }
6123    }
6124    return max_jpeg_dim;
6125}
6126
6127/*===========================================================================
6128 * FUNCTION   : addStreamConfig
6129 *
6130 * DESCRIPTION: adds the stream configuration to the array
6131 *
6132 * PARAMETERS :
6133 * @available_stream_configs : pointer to stream configuration array
6134 * @scalar_format            : scalar format
6135 * @dim                      : configuration dimension
6136 * @config_type              : input or output configuration type
6137 *
6138 * RETURN     : NONE
6139 *==========================================================================*/
6140void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6141        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6142{
6143    available_stream_configs.add(scalar_format);
6144    available_stream_configs.add(dim.width);
6145    available_stream_configs.add(dim.height);
6146    available_stream_configs.add(config_type);
6147}
6148
6149/*===========================================================================
 * FUNCTION   : supportBurstCapture
6151 *
6152 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6153 *
6154 * PARAMETERS :
6155 *   @cameraId  : camera Id
6156 *
6157 * RETURN     : true if camera supports BURST_CAPTURE
6158 *              false otherwise
6159 *==========================================================================*/
6160bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6161{
6162    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6163    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6164    const int32_t highResWidth = 3264;
6165    const int32_t highResHeight = 2448;
6166
6167    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6168        // Maximum resolution images cannot be captured at >= 10fps
6169        // -> not supporting BURST_CAPTURE
6170        return false;
6171    }
6172
6173    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6174        // Maximum resolution images can be captured at >= 20fps
6175        // --> supporting BURST_CAPTURE
6176        return true;
6177    }
6178
6179    // Find the smallest highRes resolution, or largest resolution if there is none
6180    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6181            MAX_SIZES_CNT);
6182    size_t highRes = 0;
6183    while ((highRes + 1 < totalCnt) &&
6184            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6185            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6186            highResWidth * highResHeight)) {
6187        highRes++;
6188    }
6189    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6190        return true;
6191    } else {
6192        return false;
6193    }
6194}
6195
6196/*===========================================================================
6197 * FUNCTION   : initStaticMetadata
6198 *
6199 * DESCRIPTION: initialize the static metadata
6200 *
6201 * PARAMETERS :
6202 *   @cameraId  : camera Id
6203 *
6204 * RETURN     : int32_t type of status
6205 *              0  -- success
6206 *              non-zero failure code
6207 *==========================================================================*/
6208int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6209{
6210    int rc = 0;
6211    CameraMetadata staticInfo;
6212    size_t count = 0;
6213    bool limitedDevice = false;
6214    char prop[PROPERTY_VALUE_MAX];
6215    bool supportBurst = false;
6216
6217    supportBurst = supportBurstCapture(cameraId);
6218
6219    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6220     * guaranteed or if min fps of max resolution is less than 20 fps, its
6221     * advertised as limited device*/
6222    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6223            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6224            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6225            !supportBurst;
6226
6227    uint8_t supportedHwLvl = limitedDevice ?
6228            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6229            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
6230
6231    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6232            &supportedHwLvl, 1);
6233
6234    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6235    /*HAL 3 only*/
6236    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6237                    &gCamCapability[cameraId]->min_focus_distance, 1);
6238
6239    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6240                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6241
6242    /*should be using focal lengths but sensor doesn't provide that info now*/
6243    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6244                      &gCamCapability[cameraId]->focal_length,
6245                      1);
6246
6247    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6248            gCamCapability[cameraId]->apertures,
6249            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6250
6251    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6252            gCamCapability[cameraId]->filter_densities,
6253            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6254
6255
6256    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6257            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6258            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6259
6260    int32_t lens_shading_map_size[] = {
6261            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6262            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6263    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6264                      lens_shading_map_size,
6265                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6266
6267    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6268            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6269
6270    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6271            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6272
6273    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6274            &gCamCapability[cameraId]->max_frame_duration, 1);
6275
6276    camera_metadata_rational baseGainFactor = {
6277            gCamCapability[cameraId]->base_gain_factor.numerator,
6278            gCamCapability[cameraId]->base_gain_factor.denominator};
6279    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6280                      &baseGainFactor, 1);
6281
6282    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6283                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6284
6285    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6286            gCamCapability[cameraId]->pixel_array_size.height};
6287    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6288                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6289
6290    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6291            gCamCapability[cameraId]->active_array_size.top,
6292            gCamCapability[cameraId]->active_array_size.width,
6293            gCamCapability[cameraId]->active_array_size.height};
6294    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6295            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6296
6297    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6298            &gCamCapability[cameraId]->white_level, 1);
6299
6300    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6301            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6302
6303    bool hasBlackRegions = false;
6304    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6305        LOGW("black_region_count: %d is bounded to %d",
6306            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6307        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6308    }
6309    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6310        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6311        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6312            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6313        }
6314        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6315                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6316        hasBlackRegions = true;
6317    }
6318
6319    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6320            &gCamCapability[cameraId]->flash_charge_duration, 1);
6321
6322    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6323            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6324
6325    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
6326    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6327            &timestampSource, 1);
6328
6329    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6330            &gCamCapability[cameraId]->histogram_size, 1);
6331
6332    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6333            &gCamCapability[cameraId]->max_histogram_count, 1);
6334
6335    int32_t sharpness_map_size[] = {
6336            gCamCapability[cameraId]->sharpness_map_size.width,
6337            gCamCapability[cameraId]->sharpness_map_size.height};
6338
6339    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6340            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6341
6342    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6343            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6344
6345    int32_t scalar_formats[] = {
6346            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6347            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6348            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6349            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6350            HAL_PIXEL_FORMAT_RAW10,
6351            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6352    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6353    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6354                      scalar_formats,
6355                      scalar_formats_count);
6356
6357    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6358    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6359    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6360            count, MAX_SIZES_CNT, available_processed_sizes);
6361    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6362            available_processed_sizes, count * 2);
6363
6364    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6365    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6366    makeTable(gCamCapability[cameraId]->raw_dim,
6367            count, MAX_SIZES_CNT, available_raw_sizes);
6368    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6369            available_raw_sizes, count * 2);
6370
6371    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6372    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6373    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6374            count, MAX_SIZES_CNT, available_fps_ranges);
6375    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6376            available_fps_ranges, count * 2);
6377
6378    camera_metadata_rational exposureCompensationStep = {
6379            gCamCapability[cameraId]->exp_compensation_step.numerator,
6380            gCamCapability[cameraId]->exp_compensation_step.denominator};
6381    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6382                      &exposureCompensationStep, 1);
6383
6384    Vector<uint8_t> availableVstabModes;
6385    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6386    char eis_prop[PROPERTY_VALUE_MAX];
6387    memset(eis_prop, 0, sizeof(eis_prop));
6388    property_get("persist.camera.eis.enable", eis_prop, "0");
6389    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6390    if (facingBack && eis_prop_set) {
6391        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6392    }
6393    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6394                      availableVstabModes.array(), availableVstabModes.size());
6395
6396    /*HAL 1 and HAL 3 common*/
6397    float maxZoom = 4;
6398    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6399            &maxZoom, 1);
6400
6401    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6402    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6403
6404    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6405    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6406        max3aRegions[2] = 0; /* AF not supported */
6407    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6408            max3aRegions, 3);
6409
6410    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6411    memset(prop, 0, sizeof(prop));
6412    property_get("persist.camera.facedetect", prop, "1");
6413    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6414    LOGD("Support face detection mode: %d",
6415             supportedFaceDetectMode);
6416
6417    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6418    Vector<uint8_t> availableFaceDetectModes;
6419    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6420    if (supportedFaceDetectMode == 1) {
6421        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6422    } else if (supportedFaceDetectMode == 2) {
6423        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6424    } else if (supportedFaceDetectMode == 3) {
6425        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6426        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6427    } else {
6428        maxFaces = 0;
6429    }
6430    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6431            availableFaceDetectModes.array(),
6432            availableFaceDetectModes.size());
6433    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6434            (int32_t *)&maxFaces, 1);
6435
6436    int32_t exposureCompensationRange[] = {
6437            gCamCapability[cameraId]->exposure_compensation_min,
6438            gCamCapability[cameraId]->exposure_compensation_max};
6439    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6440            exposureCompensationRange,
6441            sizeof(exposureCompensationRange)/sizeof(int32_t));
6442
        // Lens facing direction derived from the board/capability flag.
6443    uint8_t lensFacing = (facingBack) ?
6444            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6445    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6446
        // JPEG thumbnail sizes table (int32 width/height pairs).
6447    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6448                      available_thumbnail_sizes,
6449                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6450
6451    /*all sizes will be clubbed into this tag*/
6452    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6453    /*android.scaler.availableStreamConfigurations*/
6454    Vector<int32_t> available_stream_configs;
        // NOTE(review): active_array_dim is populated here but not read within
        // this excerpt; presumably used further down the function — confirm.
6455    cam_dimension_t active_array_dim;
6456    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6457    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6458    /* Add input/output stream configurations for each scalar formats*/
6459    for (size_t j = 0; j < scalar_formats_count; j++) {
6460        switch (scalar_formats[j]) {
6461        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6462        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6463        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats: advertise the supported RAW dimensions, output only.
6464            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6465                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6466                addStreamConfig(available_stream_configs, scalar_formats[j],
6467                        gCamCapability[cameraId]->raw_dim[i],
6468                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6469            }
6470            break;
6471        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (BLOB): advertise every picture-table size, output only.
6472            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6473                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6474                addStreamConfig(available_stream_configs, scalar_formats[j],
6475                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6476                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6477            }
6478            break;
6479        case HAL_PIXEL_FORMAT_YCbCr_420_888:
6480        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6481        default:
            // YUV / implementation-defined / anything else: advertise every
            // picture-table size as an output, tracking the largest entry.
6482            cam_dimension_t largest_picture_size;
6483            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6484            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6485                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6486                addStreamConfig(available_stream_configs, scalar_formats[j],
6487                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6488                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6489                /* Book keep largest */
6490                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6491                        >= largest_picture_size.width &&
6492                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
6493                        >= largest_picture_size.height)
6494                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6495            }
6496            /* For the below 2 formats we also support input streams for
             * reprocessing, so advertise the largest size as an INPUT config. */
6497            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6498                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6499                 addStreamConfig(available_stream_configs, scalar_formats[j],
6500                         largest_picture_size,
6501                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6502            }
6503            break;
6504        }
6505    }
6506
6507    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6508                      available_stream_configs.array(), available_stream_configs.size());
        // Default hot-pixel correction and hot-pixel map reporting modes.
6509    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6510    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6511
6512    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6513    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6514
6515    /* android.scaler.availableMinFrameDurations */
        // Each entry in this tag is a (format, width, height, min_duration) tuple.
6516    Vector<int64_t> available_min_durations;
6517    for (size_t j = 0; j < scalar_formats_count; j++) {
6518        switch (scalar_formats[j]) {
6519        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6520        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6521        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats take durations from the RAW dimension table.
6522            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6523                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6524                available_min_durations.add(scalar_formats[j]);
6525                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
6526                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
6527                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
6528            }
6529            break;
6530        default:
            // All other formats take durations from the picture size table.
6531            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6532                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6533                available_min_durations.add(scalar_formats[j]);
6534                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
6535                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
6536                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
6537            }
6538            break;
6539        }
6540    }
6541    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
6542                      available_min_durations.array(), available_min_durations.size());
6543
        // Build ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS from
        // the capability HFR table, translating each HFR mode enum to its fps.
6544    Vector<int32_t> available_hfr_configs;
6545    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
6546        int32_t fps = 0;
6547        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
6548        case CAM_HFR_MODE_60FPS:
6549            fps = 60;
6550            break;
6551        case CAM_HFR_MODE_90FPS:
6552            fps = 90;
6553            break;
6554        case CAM_HFR_MODE_120FPS:
6555            fps = 120;
6556            break;
6557        case CAM_HFR_MODE_150FPS:
6558            fps = 150;
6559            break;
6560        case CAM_HFR_MODE_180FPS:
6561            fps = 180;
6562            break;
6563        case CAM_HFR_MODE_210FPS:
6564            fps = 210;
6565            break;
6566        case CAM_HFR_MODE_240FPS:
6567            fps = 240;
6568            break;
6569        case CAM_HFR_MODE_480FPS:
6570            fps = 480;
6571            break;
6572        case CAM_HFR_MODE_OFF:
6573        case CAM_HFR_MODE_MAX:
6574        default:
            // fps stays 0 and the entry is skipped by the check below.
6575            break;
6576        }
6577
6578        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
6579        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
6580            /* For each HFR frame rate, need to advertise one variable fps range
6581             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
6582             * [120, 120]. While camcorder preview alone is running [30, 120] is
6583             * set by the app. When video recording is started, [120, 120] is
6584             * set. This way sensor configuration does not change when recording
6585             * is started */
6586
            // Advertise every default HFR video size that fits within the
            // dimensions this HFR table entry supports.
6587            size_t len = sizeof(default_hfr_video_sizes) / sizeof(default_hfr_video_sizes[0]);
6588            for (size_t j = 0; j < len; j++) {
6589                if ((default_hfr_video_sizes[j].width <= gCamCapability[cameraId]->hfr_tbl[i].dim.width) &&
6590                    (default_hfr_video_sizes[j].height <= gCamCapability[cameraId]->hfr_tbl[i].dim.height)) {
6591                    //TODO: Might need additional filtering based on VFE/CPP/CPU capabilities
6592
6593                    /* (width, height, fps_min, fps_max, batch_size_max) */
6594                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
6595                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
6596                    available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6597                    available_hfr_configs.add(fps);
6598                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6599
6600                    /* (width, height, fps_min, fps_max, batch_size_max) */
6601                    available_hfr_configs.add(default_hfr_video_sizes[j].width);
6602                    available_hfr_configs.add(default_hfr_video_sizes[j].height);
6603                    available_hfr_configs.add(fps);
6604                    available_hfr_configs.add(fps);
6605                    available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6606                }// if
6607            }// for (...; j < len;...)
6608       } //if (fps >= MIN_FPS_FOR_BATCH_MODE)
6609    }
6610    //Advertise HFR capability only if the property is set (defaults to enabled)
6611    memset(prop, 0, sizeof(prop));
6612    property_get("persist.camera.hal3hfr.enable", prop, "1");
6613    uint8_t hfrEnable = (uint8_t)atoi(prop);
6614
        // NOTE(review): the array() check guards against publishing when no HFR
        // configs were collected — presumably array() is NULL for an empty
        // Vector; confirm against this Vector implementation.
6615    if(hfrEnable && available_hfr_configs.array()) {
6616        staticInfo.update(
6617                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6618                available_hfr_configs.array(), available_hfr_configs.size());
6619    }
6620
        // Maximum JPEG blob size supported by this camera.
6621    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6622    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6623                      &max_jpeg_size, 1);
6624
        // Translate each backend effect enum to its framework value; entries
        // with no framework mapping are silently dropped.
6625    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6626    size_t size = 0;
6627    count = CAM_EFFECT_MODE_MAX;
6628    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6629    for (size_t i = 0; i < count; i++) {
6630        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6631                gCamCapability[cameraId]->supported_effects[i]);
6632        if (NAME_NOT_FOUND != val) {
6633            avail_effects[size] = (uint8_t)val;
6634            size++;
6635        }
6636    }
6637    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6638                      avail_effects,
6639                      size);
6640
        // Scene modes: collect framework values plus the backend table index of
        // each accepted entry (supported_indexes feeds makeOverridesList below).
6641    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6642    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6643    size_t supported_scene_modes_cnt = 0;
6644    count = CAM_SCENE_MODE_MAX;
6645    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6646    for (size_t i = 0; i < count; i++) {
6647        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6648                CAM_SCENE_MODE_OFF) {
6649            int val = lookupFwkName(SCENE_MODES_MAP,
6650                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
6651                    gCamCapability[cameraId]->supported_scene_modes[i]);
6652            if (NAME_NOT_FOUND != val) {
6653                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6654                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6655                supported_scene_modes_cnt++;
6656            }
6657        }
6658    }
6659    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6660                      avail_scene_modes,
6661                      supported_scene_modes_cnt);
6662
        // 3 override entries (AE, AWB, AF) per advertised scene mode.
6663    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6664    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6665                      supported_scene_modes_cnt,
6666                      CAM_SCENE_MODE_MAX,
6667                      scene_mode_overrides,
6668                      supported_indexes,
6669                      cameraId);
6670
        // NOTE(review): when no scene modes were found, DISABLED is advertised,
        // but makeOverridesList already ran with count 0 — the 3 override bytes
        // published below would then come from the uninitialized stack array.
        // Confirm makeOverridesList's behavior for a zero count.
6671    if (supported_scene_modes_cnt == 0) {
6672        supported_scene_modes_cnt = 1;
6673        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6674    }
6675
6676    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6677            scene_mode_overrides, supported_scene_modes_cnt * 3);
6678
6679    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6680                                         ANDROID_CONTROL_MODE_AUTO,
6681                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6682    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6683            available_control_modes,
6684            3);
6685
        // Antibanding modes, mapped backend -> framework like effects above.
6686    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6687    size = 0;
6688    count = CAM_ANTIBANDING_MODE_MAX;
6689    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6690    for (size_t i = 0; i < count; i++) {
6691        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6692                gCamCapability[cameraId]->supported_antibandings[i]);
6693        if (NAME_NOT_FOUND != val) {
6694            avail_antibanding_modes[size] = (uint8_t)val;
6695            size++;
6696        }
6697
6698    }
6699    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6700                      avail_antibanding_modes,
6701                      size);
6702
6703    uint8_t avail_abberation_modes[] = {
6704            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
6705            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
6706            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
6707    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6708    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6709    if (0 == count) {
6710        //  If no aberration correction modes are available for a device, advertise only the OFF mode
6711        size = 1;
6712    } else {
6713        // If count is not zero then at least one among FAST or HIGH_QUALITY is supported
6714        // So, advertise all 3 modes if at least any one mode is supported as per the
6715        // new M requirement
6716        size = 3;
6717    }
6718    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6719            avail_abberation_modes,
6720            size);
6721
        // AF modes, mapped backend -> framework; unmapped entries dropped.
6722    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6723    size = 0;
6724    count = CAM_FOCUS_MODE_MAX;
6725    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6726    for (size_t i = 0; i < count; i++) {
6727        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6728                gCamCapability[cameraId]->supported_focus_modes[i]);
6729        if (NAME_NOT_FOUND != val) {
6730            avail_af_modes[size] = (uint8_t)val;
6731            size++;
6732        }
6733    }
6734    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6735                      avail_af_modes,
6736                      size);
6737
        // AWB modes, same mapping pattern.
6738    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6739    size = 0;
6740    count = CAM_WB_MODE_MAX;
6741    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6742    for (size_t i = 0; i < count; i++) {
6743        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6744                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6745                gCamCapability[cameraId]->supported_white_balances[i]);
6746        if (NAME_NOT_FOUND != val) {
6747            avail_awb_modes[size] = (uint8_t)val;
6748            size++;
6749        }
6750    }
6751    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6752                      avail_awb_modes,
6753                      size);
6754
        // Flash firing power levels, copied straight from the capability table.
6755    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6756    count = CAM_FLASH_FIRING_LEVEL_MAX;
6757    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6758            count);
6759    for (size_t i = 0; i < count; i++) {
6760        available_flash_levels[i] =
6761                gCamCapability[cameraId]->supported_firing_levels[i];
6762    }
6763    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6764            available_flash_levels, count);
6765
6766    uint8_t flashAvailable;
6767    if (gCamCapability[cameraId]->flash_available)
6768        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6769    else
6770        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6771    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6772            &flashAvailable, 1);
6773
6774    Vector<uint8_t> avail_ae_modes;
6775    count = CAM_AE_MODE_MAX;
6776    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6777    for (size_t i = 0; i < count; i++) {
6778        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6779    }
        // Flash-dependent AE modes are appended only when a flash unit exists.
        // NOTE(review): this truth-tests the TRUE/FALSE enum value directly —
        // relies on ANDROID_FLASH_INFO_AVAILABLE_FALSE being 0; confirm.
6780    if (flashAvailable) {
6781        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6782        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6783    }
6784    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6785                      avail_ae_modes.array(),
6786                      avail_ae_modes.size());
6787
        // Sensor (analog+digital) sensitivity range [min, max].
6788    int32_t sensitivity_range[2];
6789    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6790    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6791    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6792                      sensitivity_range,
6793                      sizeof(sensitivity_range) / sizeof(int32_t));
6794
6795    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6796                      &gCamCapability[cameraId]->max_analog_sensitivity,
6797                      1);
6798
        // Sensor mount angle becomes the reported orientation.
6799    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6800    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6801                      &sensor_orientation,
6802                      1);
6803
        // Max simultaneous output streams: {stalling, processed, RAW}.
6804    int32_t max_output_streams[] = {
6805            MAX_STALLING_STREAMS,
6806            MAX_PROCESSED_STREAMS,
6807            MAX_RAW_STREAMS};
6808    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6809            max_output_streams,
6810            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6811
        // No app-controllable LEDs: tag published with zero entries.
6812    uint8_t avail_leds = 0;
6813    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6814                      &avail_leds, 0);
6815
        // Focus distance calibration is published only when the backend value
        // maps to a framework enum.
6816    uint8_t focus_dist_calibrated;
6817    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6818            gCamCapability[cameraId]->focus_dist_calibrated);
6819    if (NAME_NOT_FOUND != val) {
6820        focus_dist_calibrated = (uint8_t)val;
6821        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6822                     &focus_dist_calibrated, 1);
6823    }
6824
        // Sensor test pattern modes, mapped backend -> framework.
6825    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6826    size = 0;
6827    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6828            MAX_TEST_PATTERN_CNT);
6829    for (size_t i = 0; i < count; i++) {
6830        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6831                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6832        if (NAME_NOT_FOUND != testpatternMode) {
6833            avail_testpattern_modes[size] = testpatternMode;
6834            size++;
6835        }
6836    }
6837    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6838                      avail_testpattern_modes,
6839                      size);
6840
        // Worst-case frames a request can be in flight inside the HAL.
6841    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6842    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6843                      &max_pipeline_depth,
6844                      1);
6845
6846    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6847    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6848                      &partial_result_count,
6849                       1);
6850
        // Maximum extra stall (frames) a reprocess capture may introduce.
6851    int32_t max_stall_duration = MAX_REPROCESS_STALL;
6852    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6853
        // Advertised capability set. BACKWARD_COMPATIBLE, MANUAL_SENSOR,
        // MANUAL_POST_PROCESSING, READ_SENSOR_SETTINGS and both reprocessing
        // capabilities are unconditional; the rest are gated below.
6854    Vector<uint8_t> available_capabilities;
6855    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6856    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6857    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6858    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6859    if (supportBurst) {
6860        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6861    }
6862    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6863    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
        // Same gate as the HIGH_SPEED_VIDEO_CONFIGURATIONS tag above, so the
        // capability is only claimed when configs were actually published.
6864    if (hfrEnable && available_hfr_configs.array()) {
6865        available_capabilities.add(
6866                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6867    }
6868
        // RAW capability for any non-YUV sensor.
6869    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6870        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6871    }
6872    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6873            available_capabilities.array(),
6874            available_capabilities.size());
6875
6876    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
6877    //Assumption is that all bayer cameras support MANUAL_SENSOR.
6878    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6879            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6880
6881    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6882            &aeLockAvailable, 1);
6883
6884    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
6885    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
6886    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6887            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6888
6889    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6890            &awbLockAvailable, 1);
6891
        // One reprocess input stream at a time.
6892    int32_t max_input_streams = 1;
6893    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6894                      &max_input_streams,
6895                      1);
6896
6897    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
6898    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6899            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6900            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6901            HAL_PIXEL_FORMAT_YCbCr_420_888};
6902    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6903                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6904
        // Per-frame-control sync latency.
6905    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6906    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6907                      &max_latency,
6908                      1);
6909
        // Post-RAW (ISP digital gain) sensitivity boost range [min, max].
6910    int32_t isp_sensitivity_range[2];
6911    isp_sensitivity_range[0] =
6912        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
6913    isp_sensitivity_range[1] =
6914        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
6915    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
6916                      isp_sensitivity_range,
6917                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
6918
        // Fixed mode lists for the post-processing blocks.
6919    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6920                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6921    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6922            available_hot_pixel_modes,
6923            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6924
6925    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6926                                         ANDROID_SHADING_MODE_FAST,
6927                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
6928    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6929                      available_shading_modes,
6930                      3);
6931
6932    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6933                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6934    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6935                      available_lens_shading_map_modes,
6936                      2);
6937
6938    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6939                                      ANDROID_EDGE_MODE_FAST,
6940                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
6941                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6942    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6943            available_edge_modes,
6944            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6945
6946    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6947                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
6948                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6949                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6950                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6951    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6952            available_noise_red_modes,
6953            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6954
6955    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6956                                         ANDROID_TONEMAP_MODE_FAST,
6957                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6958    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6959            available_tonemap_modes,
6960            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6961
        // Only OFF is advertised for hot pixel map reporting.
6962    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6963    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6964            available_hot_pixel_map_modes,
6965            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6966
        // DNG reference illuminants, published only when the backend value maps
        // to a framework enum.
6967    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6968            gCamCapability[cameraId]->reference_illuminant1);
6969    if (NAME_NOT_FOUND != val) {
6970        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6971        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6972    }
6973
6974    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6975            gCamCapability[cameraId]->reference_illuminant2);
6976    if (NAME_NOT_FOUND != val) {
6977        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6978        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6979    }
6980
        // Color matrices reinterpreted from the capability struct as rationals.
        // NOTE(review): the casts assume the backend matrix layout matches
        // camera_metadata_rational_t element-for-element — confirm the struct.
6981    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6982            (void *)gCamCapability[cameraId]->forward_matrix1,
6983            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6984
6985    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6986            (void *)gCamCapability[cameraId]->forward_matrix2,
6987            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6988
6989    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6990            (void *)gCamCapability[cameraId]->color_transform1,
6991            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6992
6993    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6994            (void *)gCamCapability[cameraId]->color_transform2,
6995            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6996
6997    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6998            (void *)gCamCapability[cameraId]->calibration_transform1,
6999            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7000
7001    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7002            (void *)gCamCapability[cameraId]->calibration_transform2,
7003            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7004
        // Keys an app may set in a capture request. AF_REGIONS is appended
        // conditionally below for devices with more than one focus mode.
7005    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7006       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7007       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7008       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7009       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7010       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7011       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7012       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7013       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7014       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7015       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7016       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7017       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7018       ANDROID_JPEG_GPS_COORDINATES,
7019       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7020       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7021       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7022       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7023       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7024       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7025       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7026       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7027       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7028       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7029       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7030       ANDROID_STATISTICS_FACE_DETECT_MODE,
7031       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7032       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7033       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7034       ANDROID_BLACK_LEVEL_LOCK };
7035
7036    size_t request_keys_cnt =
7037            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7038    Vector<int32_t> available_request_keys;
7039    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
        // AF regions only make sense when more than one focus mode exists.
7040    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7041        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7042    }
7043
7044    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7045            available_request_keys.array(), available_request_keys.size());
7046
        // Keys that may appear in capture results. Conditional additions below:
        // AF_REGIONS (multi-focus-mode), noise profile / green split (RAW
        // sensors), and face-detect keys by supported detect mode.
        // NOTE(review): ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST appears
        // twice in this list (middle and last entry) — harmless duplicate.
7047    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7048       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7049       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7050       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7051       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7052       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7053       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7054       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7055       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7056       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7057       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7058       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7059       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7060       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7061       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7062       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7063       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7064       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7065       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7066       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7067       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7068       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7069       ANDROID_STATISTICS_FACE_SCORES,
7070       ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
7071       ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7072       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
7073    size_t result_keys_cnt =
7074            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7075
7076    Vector<int32_t> available_result_keys;
7077    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7078    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7079        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7080    }
7081    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7082        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7083        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7084    }
7085    if (supportedFaceDetectMode == 1) {
7086        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7087        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7088    } else if ((supportedFaceDetectMode == 2) ||
7089            (supportedFaceDetectMode == 3)) {
7090        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7091        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7092    }
7093    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7094            available_result_keys.array(), available_result_keys.size());
7095
7096    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7097       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7098       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7099       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7100       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7101       ANDROID_SCALER_CROPPING_TYPE,
7102       ANDROID_SYNC_MAX_LATENCY,
7103       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7104       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7105       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7106       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7107       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7108       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7109       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7110       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7111       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7112       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7113       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7114       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7115       ANDROID_LENS_FACING,
7116       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7117       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7118       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7119       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7120       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7121       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7122       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7123       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7124       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7125       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7126       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7127       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7128       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7129       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7130       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7131       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7132       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7133       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7134       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7135       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7136       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7137       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7138       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7139       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7140       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7141       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7142       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7143       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7144       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7145       ANDROID_CONTROL_AVAILABLE_MODES,
7146       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7147       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7148       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7149       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7150       ANDROID_SHADING_AVAILABLE_MODES,
7151       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7152       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7153
7154    Vector<int32_t> available_characteristics_keys;
7155    available_characteristics_keys.appendArray(characteristics_keys_basic,
7156            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7157    if (hasBlackRegions) {
7158        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7159    }
7160    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7161                      available_characteristics_keys.array(),
7162                      available_characteristics_keys.size());
7163
7164    /*available stall durations depend on the hw + sw and will be different for different devices */
7165    /*have to add for raw after implementation*/
7166    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7167    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7168
7169    Vector<int64_t> available_stall_durations;
7170    for (uint32_t j = 0; j < stall_formats_count; j++) {
7171        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7172            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7173                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7174                available_stall_durations.add(stall_formats[j]);
7175                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7176                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7177                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7178          }
7179        } else {
7180            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7181                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7182                available_stall_durations.add(stall_formats[j]);
7183                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7184                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7185                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7186            }
7187        }
7188    }
7189    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7190                      available_stall_durations.array(),
7191                      available_stall_durations.size());
7192
7193    //QCAMERA3_OPAQUE_RAW
7194    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7195    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7196    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7197    case LEGACY_RAW:
7198        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7199            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7200        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7201            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7202        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7203            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7204        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7205        break;
7206    case MIPI_RAW:
7207        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7208            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7209        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7210            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7211        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7212            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7213        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7214        break;
7215    default:
7216        LOGE("unknown opaque_raw_format %d",
7217                gCamCapability[cameraId]->opaque_raw_fmt);
7218        break;
7219    }
7220    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7221
7222    Vector<int32_t> strides;
7223    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7224            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7225        cam_stream_buf_plane_info_t buf_planes;
7226        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7227        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7228        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7229            &gCamCapability[cameraId]->padding_info, &buf_planes);
7230        strides.add(buf_planes.plane_info.mp[0].stride);
7231    }
7232    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7233            strides.size());
7234
7235    Vector<int32_t> opaque_size;
7236    for (size_t j = 0; j < scalar_formats_count; j++) {
7237        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7238            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7239                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7240                cam_stream_buf_plane_info_t buf_planes;
7241
7242                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7243                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7244
7245                if (rc == 0) {
7246                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7247                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7248                    opaque_size.add(buf_planes.plane_info.frame_len);
7249                }else {
7250                    LOGE("raw frame calculation failed!");
7251                }
7252            }
7253        }
7254    }
7255
7256    if ((opaque_size.size() > 0) &&
7257            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7258        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7259    else
7260        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7261
7262    gStaticMetadata[cameraId] = staticInfo.release();
7263    return rc;
7264}
7265
7266/*===========================================================================
7267 * FUNCTION   : makeTable
7268 *
7269 * DESCRIPTION: make a table of sizes
7270 *
7271 * PARAMETERS :
7272 *
7273 *
7274 *==========================================================================*/
7275void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7276        size_t max_size, int32_t *sizeTable)
7277{
7278    size_t j = 0;
7279    if (size > max_size) {
7280       size = max_size;
7281    }
7282    for (size_t i = 0; i < size; i++) {
7283        sizeTable[j] = dimTable[i].width;
7284        sizeTable[j+1] = dimTable[i].height;
7285        j+=2;
7286    }
7287}
7288
7289/*===========================================================================
7290 * FUNCTION   : makeFPSTable
7291 *
7292 * DESCRIPTION: make a table of fps ranges
7293 *
7294 * PARAMETERS :
7295 *
7296 *==========================================================================*/
7297void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7298        size_t max_size, int32_t *fpsRangesTable)
7299{
7300    size_t j = 0;
7301    if (size > max_size) {
7302       size = max_size;
7303    }
7304    for (size_t i = 0; i < size; i++) {
7305        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7306        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7307        j+=2;
7308    }
7309}
7310
7311/*===========================================================================
7312 * FUNCTION   : makeOverridesList
7313 *
7314 * DESCRIPTION: make a list of scene mode overrides
7315 *
7316 * PARAMETERS :
7317 *
7318 *
7319 *==========================================================================*/
7320void QCamera3HardwareInterface::makeOverridesList(
7321        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
7322        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
7323{
7324    /*daemon will give a list of overrides for all scene modes.
7325      However we should send the fwk only the overrides for the scene modes
7326      supported by the framework*/
7327    size_t j = 0;
7328    if (size > max_size) {
7329       size = max_size;
7330    }
7331    size_t focus_count = CAM_FOCUS_MODE_MAX;
7332    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
7333            focus_count);
7334    for (size_t i = 0; i < size; i++) {
7335        bool supt = false;
7336        size_t index = supported_indexes[i];
7337        overridesList[j] = gCamCapability[camera_id]->flash_available ?
7338                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
7339        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7340                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7341                overridesTable[index].awb_mode);
7342        if (NAME_NOT_FOUND != val) {
7343            overridesList[j+1] = (uint8_t)val;
7344        }
7345        uint8_t focus_override = overridesTable[index].af_mode;
7346        for (size_t k = 0; k < focus_count; k++) {
7347           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
7348              supt = true;
7349              break;
7350           }
7351        }
7352        if (supt) {
7353            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7354                    focus_override);
7355            if (NAME_NOT_FOUND != val) {
7356                overridesList[j+2] = (uint8_t)val;
7357            }
7358        } else {
7359           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
7360        }
7361        j+=3;
7362    }
7363}
7364
7365/*===========================================================================
7366 * FUNCTION   : filterJpegSizes
7367 *
7368 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7369 *              could be downscaled to
7370 *
7371 * PARAMETERS :
7372 *
7373 * RETURN     : length of jpegSizes array
7374 *==========================================================================*/
7375
7376size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7377        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7378        uint8_t downscale_factor)
7379{
7380    if (0 == downscale_factor) {
7381        downscale_factor = 1;
7382    }
7383
7384    int32_t min_width = active_array_size.width / downscale_factor;
7385    int32_t min_height = active_array_size.height / downscale_factor;
7386    size_t jpegSizesCnt = 0;
7387    if (processedSizesCnt > maxCount) {
7388        processedSizesCnt = maxCount;
7389    }
7390    for (size_t i = 0; i < processedSizesCnt; i+=2) {
7391        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7392            jpegSizes[jpegSizesCnt] = processedSizes[i];
7393            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7394            jpegSizesCnt += 2;
7395        }
7396    }
7397    return jpegSizesCnt;
7398}
7399
7400/*===========================================================================
7401 * FUNCTION   : computeNoiseModelEntryS
7402 *
7403 * DESCRIPTION: function to map a given sensitivity to the S noise
7404 *              model parameters in the DNG noise model.
7405 *
7406 * PARAMETERS : sens : the sensor sensitivity
7407 *
7408 ** RETURN    : S (sensor amplification) noise
7409 *
7410 *==========================================================================*/
7411double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7412    double s = gCamCapability[mCameraId]->gradient_S * sens +
7413            gCamCapability[mCameraId]->offset_S;
7414    return ((s < 0.0) ? 0.0 : s);
7415}
7416
7417/*===========================================================================
7418 * FUNCTION   : computeNoiseModelEntryO
7419 *
7420 * DESCRIPTION: function to map a given sensitivity to the O noise
7421 *              model parameters in the DNG noise model.
7422 *
7423 * PARAMETERS : sens : the sensor sensitivity
7424 *
7425 ** RETURN    : O (sensor readout) noise
7426 *
7427 *==========================================================================*/
7428double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7429    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7430    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7431            1.0 : (1.0 * sens / max_analog_sens);
7432    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7433            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7434    return ((o < 0.0) ? 0.0 : o);
7435}
7436
7437/*===========================================================================
7438 * FUNCTION   : getSensorSensitivity
7439 *
7440 * DESCRIPTION: convert iso_mode to an integer value
7441 *
7442 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7443 *
7444 ** RETURN    : sensitivity supported by sensor
7445 *
7446 *==========================================================================*/
7447int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7448{
7449    int32_t sensitivity;
7450
7451    switch (iso_mode) {
7452    case CAM_ISO_MODE_100:
7453        sensitivity = 100;
7454        break;
7455    case CAM_ISO_MODE_200:
7456        sensitivity = 200;
7457        break;
7458    case CAM_ISO_MODE_400:
7459        sensitivity = 400;
7460        break;
7461    case CAM_ISO_MODE_800:
7462        sensitivity = 800;
7463        break;
7464    case CAM_ISO_MODE_1600:
7465        sensitivity = 1600;
7466        break;
7467    default:
7468        sensitivity = -1;
7469        break;
7470    }
7471    return sensitivity;
7472}
7473
7474/*===========================================================================
7475 * FUNCTION   : getCamInfo
7476 *
7477 * DESCRIPTION: query camera capabilities
7478 *
7479 * PARAMETERS :
7480 *   @cameraId  : camera Id
7481 *   @info      : camera info struct to be filled in with camera capabilities
7482 *
7483 * RETURN     : int type of status
7484 *              NO_ERROR  -- success
7485 *              none-zero failure code
7486 *==========================================================================*/
7487int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
7488        struct camera_info *info)
7489{
7490    ATRACE_CALL();
7491    int rc = 0;
7492
7493    pthread_mutex_lock(&gCamLock);
7494    if (NULL == gCamCapability[cameraId]) {
7495        rc = initCapabilities(cameraId);
7496        if (rc < 0) {
7497            pthread_mutex_unlock(&gCamLock);
7498            return rc;
7499        }
7500    }
7501
7502    if (NULL == gStaticMetadata[cameraId]) {
7503        rc = initStaticMetadata(cameraId);
7504        if (rc < 0) {
7505            pthread_mutex_unlock(&gCamLock);
7506            return rc;
7507        }
7508    }
7509
7510    switch(gCamCapability[cameraId]->position) {
7511    case CAM_POSITION_BACK:
7512        info->facing = CAMERA_FACING_BACK;
7513        break;
7514
7515    case CAM_POSITION_FRONT:
7516        info->facing = CAMERA_FACING_FRONT;
7517        break;
7518
7519    default:
7520        LOGE("Unknown position type for camera id:%d", cameraId);
7521        rc = -1;
7522        break;
7523    }
7524
7525
7526    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
7527    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
7528    info->static_camera_characteristics = gStaticMetadata[cameraId];
7529
7530    //For now assume both cameras can operate independently.
7531    info->conflicting_devices = NULL;
7532    info->conflicting_devices_length = 0;
7533
7534    //resource cost is 100 * MIN(1.0, m/M),
7535    //where m is throughput requirement with maximum stream configuration
7536    //and M is CPP maximum throughput.
7537    float max_fps = 0.0;
7538    for (uint32_t i = 0;
7539            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
7540        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
7541            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
7542    }
7543    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
7544            gCamCapability[cameraId]->active_array_size.width *
7545            gCamCapability[cameraId]->active_array_size.height * max_fps /
7546            gCamCapability[cameraId]->max_pixel_bandwidth;
7547    info->resource_cost = 100 * MIN(1.0, ratio);
7548    LOGI("camera %d resource cost is %d", cameraId,
7549            info->resource_cost);
7550
7551    pthread_mutex_unlock(&gCamLock);
7552    return rc;
7553}
7554
7555/*===========================================================================
7556 * FUNCTION   : translateCapabilityToMetadata
7557 *
7558 * DESCRIPTION: translate the capability into camera_metadata_t
7559 *
7560 * PARAMETERS : type of the request
7561 *
7562 *
7563 * RETURN     : success: camera_metadata_t*
7564 *              failure: NULL
7565 *
7566 *==========================================================================*/
7567camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7568{
7569    if (mDefaultMetadata[type] != NULL) {
7570        return mDefaultMetadata[type];
7571    }
7572    //first time we are handling this request
7573    //fill up the metadata structure using the wrapper class
7574    CameraMetadata settings;
7575    //translate from cam_capability_t to camera_metadata_tag_t
7576    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7577    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7578    int32_t defaultRequestID = 0;
7579    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7580
7581    /* OIS disable */
7582    char ois_prop[PROPERTY_VALUE_MAX];
7583    memset(ois_prop, 0, sizeof(ois_prop));
7584    property_get("persist.camera.ois.disable", ois_prop, "0");
7585    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7586
7587    /* Force video to use OIS */
7588    char videoOisProp[PROPERTY_VALUE_MAX];
7589    memset(videoOisProp, 0, sizeof(videoOisProp));
7590    property_get("persist.camera.ois.video", videoOisProp, "1");
7591    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7592
7593    // EIS enable/disable
7594    char eis_prop[PROPERTY_VALUE_MAX];
7595    memset(eis_prop, 0, sizeof(eis_prop));
7596    property_get("persist.camera.eis.enable", eis_prop, "0");
7597    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7598
7599    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7600    // This is a bit hacky. EIS is enabled only when the above setprop
7601    // is set to non-zero value and on back camera (for 2015 Nexus).
7602    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7603    // configureStream is called before this function. In other words,
7604    // we cannot guarantee the app will call configureStream before
7605    // calling createDefaultRequest.
7606    const bool eisEnabled = facingBack && eis_prop_set;
7607
7608    uint8_t controlIntent = 0;
7609    uint8_t focusMode;
7610    uint8_t vsMode;
7611    uint8_t optStabMode;
7612    uint8_t cacMode;
7613    uint8_t edge_mode;
7614    uint8_t noise_red_mode;
7615    uint8_t tonemap_mode;
7616    bool highQualityModeEntryAvailable = FALSE;
7617    bool fastModeEntryAvailable = FALSE;
7618    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7619    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7620    switch (type) {
7621      case CAMERA3_TEMPLATE_PREVIEW:
7622        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7623        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7624        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7625        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7626        edge_mode = ANDROID_EDGE_MODE_FAST;
7627        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7628        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7629        break;
7630      case CAMERA3_TEMPLATE_STILL_CAPTURE:
7631        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7632        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7633        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7634        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7635        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7636        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7637        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7638        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
7639        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
7640            if (gCamCapability[mCameraId]->aberration_modes[i] ==
7641                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
7642                highQualityModeEntryAvailable = TRUE;
7643            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
7644                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
7645                fastModeEntryAvailable = TRUE;
7646            }
7647        }
7648        if (highQualityModeEntryAvailable) {
7649            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7650        } else if (fastModeEntryAvailable) {
7651            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7652        }
7653        break;
7654      case CAMERA3_TEMPLATE_VIDEO_RECORD:
7655        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7656        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7657        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7658        if (eisEnabled) {
7659            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7660        }
7661        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7662        edge_mode = ANDROID_EDGE_MODE_FAST;
7663        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7664        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7665        if (forceVideoOis)
7666            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7667        break;
7668      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7669        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7670        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7671        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7672        if (eisEnabled) {
7673            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7674        }
7675        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7676        edge_mode = ANDROID_EDGE_MODE_FAST;
7677        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7678        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7679        if (forceVideoOis)
7680            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7681        break;
7682      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7683        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7684        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7685        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7686        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7687        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7688        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7689        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7690        break;
7691      case CAMERA3_TEMPLATE_MANUAL:
7692        edge_mode = ANDROID_EDGE_MODE_FAST;
7693        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7694        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7695        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7696        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7697        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7698        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7699        break;
7700      default:
7701        edge_mode = ANDROID_EDGE_MODE_FAST;
7702        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7703        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7704        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7705        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7706        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7707        break;
7708    }
7709    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7710    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7711    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7712    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7713        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7714    }
7715    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7716
7717    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7718            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7719        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7720    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7721            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7722            || ois_disable)
7723        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7724    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7725
7726    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7727            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7728
7729    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7730    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7731
7732    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7733    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7734
7735    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7736    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7737
7738    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7739    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7740
7741    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7742    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7743
7744    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7745    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7746
7747    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7748    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7749
7750    /*flash*/
7751    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7752    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7753
7754    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7755    settings.update(ANDROID_FLASH_FIRING_POWER,
7756            &flashFiringLevel, 1);
7757
7758    /* lens */
7759    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7760    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7761
7762    if (gCamCapability[mCameraId]->filter_densities_count) {
7763        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7764        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7765                        gCamCapability[mCameraId]->filter_densities_count);
7766    }
7767
7768    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7769    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7770
7771    float default_focus_distance = 0;
7772    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7773
7774    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7775    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7776
7777    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7778    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7779
7780    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7781    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7782
7783    /* face detection (default to OFF) */
7784    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7785    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7786
7787    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7788    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7789
7790    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7791    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7792
7793    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7794    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7795
7796    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7797    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7798
7799    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7800    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7801
7802    /* Exposure time(Update the Min Exposure Time)*/
7803    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7804    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7805
7806    /* frame duration */
7807    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7808    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7809
7810    /* sensitivity */
7811    static const int32_t default_sensitivity = 100;
7812    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7813
7814    /*edge mode*/
7815    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7816
7817    /*noise reduction mode*/
7818    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7819
7820    /*color correction mode*/
7821    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7822    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7823
7824    /*transform matrix mode*/
7825    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7826
7827    int32_t scaler_crop_region[4];
7828    scaler_crop_region[0] = 0;
7829    scaler_crop_region[1] = 0;
7830    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7831    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7832    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7833
7834    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7835    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7836
7837    /*focus distance*/
7838    float focus_distance = 0.0;
7839    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7840
7841    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7842    float max_range = 0.0;
7843    float max_fixed_fps = 0.0;
7844    int32_t fps_range[2] = {0, 0};
7845    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7846            i++) {
7847        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7848            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7849        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7850                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7851                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7852            if (range > max_range) {
7853                fps_range[0] =
7854                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7855                fps_range[1] =
7856                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7857                max_range = range;
7858            }
7859        } else {
7860            if (range < 0.01 && max_fixed_fps <
7861                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7862                fps_range[0] =
7863                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7864                fps_range[1] =
7865                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7866                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7867            }
7868        }
7869    }
7870    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7871
7872    /*precapture trigger*/
7873    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7874    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7875
7876    /*af trigger*/
7877    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7878    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7879
7880    /* ae & af regions */
7881    int32_t active_region[] = {
7882            gCamCapability[mCameraId]->active_array_size.left,
7883            gCamCapability[mCameraId]->active_array_size.top,
7884            gCamCapability[mCameraId]->active_array_size.left +
7885                    gCamCapability[mCameraId]->active_array_size.width,
7886            gCamCapability[mCameraId]->active_array_size.top +
7887                    gCamCapability[mCameraId]->active_array_size.height,
7888            0};
7889    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7890            sizeof(active_region) / sizeof(active_region[0]));
7891    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7892            sizeof(active_region) / sizeof(active_region[0]));
7893
7894    /* black level lock */
7895    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7896    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7897
7898    /* lens shading map mode */
7899    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7900    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7901        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7902    }
7903    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7904
7905    //special defaults for manual template
7906    if (type == CAMERA3_TEMPLATE_MANUAL) {
7907        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7908        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7909
7910        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7911        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7912
7913        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7914        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7915
7916        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7917        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7918
7919        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7920        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7921
7922        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7923        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7924    }
7925
7926
7927    /* TNR
7928     * We'll use this location to determine which modes TNR will be set.
7929     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7930     * This is not to be confused with linking on a per stream basis that decision
7931     * is still on per-session basis and will be handled as part of config stream
7932     */
7933    uint8_t tnr_enable = 0;
7934
7935    if (m_bTnrPreview || m_bTnrVideo) {
7936
7937        switch (type) {
7938            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7939            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7940                    tnr_enable = 1;
7941                    break;
7942
7943            default:
7944                    tnr_enable = 0;
7945                    break;
7946        }
7947
7948        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7949        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7950        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7951
7952        LOGD("TNR:%d with process plate %d for template:%d",
7953                             tnr_enable, tnr_process_type, type);
7954    }
7955
7956    /* CDS default */
7957    char prop[PROPERTY_VALUE_MAX];
7958    memset(prop, 0, sizeof(prop));
7959    property_get("persist.camera.CDS", prop, "Auto");
7960    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7961    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7962    if (CAM_CDS_MODE_MAX == cds_mode) {
7963        cds_mode = CAM_CDS_MODE_AUTO;
7964    }
7965
7966    /* Disabling CDS in templates which have TNR enabled*/
7967    if (tnr_enable)
7968        cds_mode = CAM_CDS_MODE_OFF;
7969
7970    int32_t mode = cds_mode;
7971    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7972    mDefaultMetadata[type] = settings.release();
7973
7974    return mDefaultMetadata[type];
7975}
7976
7977/*===========================================================================
7978 * FUNCTION   : setFrameParameters
7979 *
7980 * DESCRIPTION: set parameters per frame as requested in the metadata from
7981 *              framework
7982 *
7983 * PARAMETERS :
7984 *   @request   : request that needs to be serviced
7985 *   @streamID : Stream ID of all the requested streams
7986 *   @blob_request: Whether this request is a blob request or not
7987 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error returned by translateToHalMetadata
7990 *==========================================================================*/
7991int QCamera3HardwareInterface::setFrameParameters(
7992                    camera3_capture_request_t *request,
7993                    cam_stream_ID_t streamID,
7994                    int blob_request,
7995                    uint32_t snapshotStreamId)
7996{
7997    /*translate from camera_metadata_t type to parm_type_t*/
7998    int rc = 0;
7999    int32_t hal_version = CAM_HAL_V3;
8000
8001    clear_metadata_buffer(mParameters);
8002    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8003        LOGE("Failed to set hal version in the parameters");
8004        return BAD_VALUE;
8005    }
8006
8007    /*we need to update the frame number in the parameters*/
8008    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8009            request->frame_number)) {
8010        LOGE("Failed to set the frame number in the parameters");
8011        return BAD_VALUE;
8012    }
8013
8014    /* Update stream id of all the requested buffers */
8015    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8016        LOGE("Failed to set stream type mask in the parameters");
8017        return BAD_VALUE;
8018    }
8019
8020    if (mUpdateDebugLevel) {
8021        uint32_t dummyDebugLevel = 0;
8022        /* The value of dummyDebugLevel is irrelavent. On
8023         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8024        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8025                dummyDebugLevel)) {
8026            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8027            return BAD_VALUE;
8028        }
8029        mUpdateDebugLevel = false;
8030    }
8031
8032    if(request->settings != NULL){
8033        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8034        if (blob_request)
8035            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8036    }
8037
8038    return rc;
8039}
8040
8041/*===========================================================================
8042 * FUNCTION   : setReprocParameters
8043 *
8044 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8045 *              return it.
8046 *
8047 * PARAMETERS :
8048 *   @request   : request that needs to be serviced
8049 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error returned by translateToHalMetadata
8052 *==========================================================================*/
8053int32_t QCamera3HardwareInterface::setReprocParameters(
8054        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8055        uint32_t snapshotStreamId)
8056{
8057    /*translate from camera_metadata_t type to parm_type_t*/
8058    int rc = 0;
8059
8060    if (NULL == request->settings){
8061        LOGE("Reprocess settings cannot be NULL");
8062        return BAD_VALUE;
8063    }
8064
8065    if (NULL == reprocParam) {
8066        LOGE("Invalid reprocessing metadata buffer");
8067        return BAD_VALUE;
8068    }
8069    clear_metadata_buffer(reprocParam);
8070
8071    /*we need to update the frame number in the parameters*/
8072    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8073            request->frame_number)) {
8074        LOGE("Failed to set the frame number in the parameters");
8075        return BAD_VALUE;
8076    }
8077
8078    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8079    if (rc < 0) {
8080        LOGE("Failed to translate reproc request");
8081        return rc;
8082    }
8083
8084    CameraMetadata frame_settings;
8085    frame_settings = request->settings;
8086    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8087            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8088        int32_t *crop_count =
8089                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8090        int32_t *crop_data =
8091                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8092        int32_t *roi_map =
8093                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8094        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8095            cam_crop_data_t crop_meta;
8096            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8097            crop_meta.num_of_streams = 1;
8098            crop_meta.crop_info[0].crop.left   = crop_data[0];
8099            crop_meta.crop_info[0].crop.top    = crop_data[1];
8100            crop_meta.crop_info[0].crop.width  = crop_data[2];
8101            crop_meta.crop_info[0].crop.height = crop_data[3];
8102
8103            crop_meta.crop_info[0].roi_map.left =
8104                    roi_map[0];
8105            crop_meta.crop_info[0].roi_map.top =
8106                    roi_map[1];
8107            crop_meta.crop_info[0].roi_map.width =
8108                    roi_map[2];
8109            crop_meta.crop_info[0].roi_map.height =
8110                    roi_map[3];
8111
8112            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8113                rc = BAD_VALUE;
8114            }
8115            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8116                    request->input_buffer->stream,
8117                    crop_meta.crop_info[0].crop.left,
8118                    crop_meta.crop_info[0].crop.top,
8119                    crop_meta.crop_info[0].crop.width,
8120                    crop_meta.crop_info[0].crop.height);
8121            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8122                    request->input_buffer->stream,
8123                    crop_meta.crop_info[0].roi_map.left,
8124                    crop_meta.crop_info[0].roi_map.top,
8125                    crop_meta.crop_info[0].roi_map.width,
8126                    crop_meta.crop_info[0].roi_map.height);
8127            } else {
8128                LOGE("Invalid reprocess crop count %d!", *crop_count);
8129            }
8130    } else {
8131        LOGE("No crop data from matching output stream");
8132    }
8133
8134    /* These settings are not needed for regular requests so handle them specially for
8135       reprocess requests; information needed for EXIF tags */
8136    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8137        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8138                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8139        if (NAME_NOT_FOUND != val) {
8140            uint32_t flashMode = (uint32_t)val;
8141            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8142                rc = BAD_VALUE;
8143            }
8144        } else {
8145            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8146                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8147        }
8148    } else {
8149        LOGH("No flash mode in reprocess settings");
8150    }
8151
8152    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8153        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8154        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8155            rc = BAD_VALUE;
8156        }
8157    } else {
8158        LOGH("No flash state in reprocess settings");
8159    }
8160
8161    return rc;
8162}
8163
8164/*===========================================================================
8165 * FUNCTION   : saveRequestSettings
8166 *
8167 * DESCRIPTION: Add any settings that might have changed to the request settings
8168 *              and save the settings to be applied on the frame
8169 *
8170 * PARAMETERS :
8171 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8172 *   @request      : request with initial settings
8173 *
8174 * RETURN     :
8175 * camera_metadata_t* : pointer to the saved request settings
8176 *==========================================================================*/
8177camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8178        const CameraMetadata &jpegMetadata,
8179        camera3_capture_request_t *request)
8180{
8181    camera_metadata_t *resultMetadata;
8182    CameraMetadata camMetadata;
8183    camMetadata = request->settings;
8184
8185    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8186        int32_t thumbnail_size[2];
8187        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8188        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8189        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8190                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8191    }
8192
8193    resultMetadata = camMetadata.release();
8194    return resultMetadata;
8195}
8196
8197/*===========================================================================
8198 * FUNCTION   : setHalFpsRange
8199 *
8200 * DESCRIPTION: set FPS range parameter
8201 *
8202 *
8203 * PARAMETERS :
8204 *   @settings    : Metadata from framework
8205 *   @hal_metadata: Metadata buffer
8206 *
8207 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
8210 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): find() is unchecked here -- this assumes the caller only
    // invokes setHalFpsRange when ANDROID_CONTROL_AE_TARGET_FPS_RANGE is
    // present in 'settings'; confirm against call sites.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Start video fps identical to the AE target range; the constrained-HFR
    // branch below may override min/video_min to force a fixed sensor rate.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset batch size each time; it is recomputed below only for batched HFR.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Constrained high-speed session: pin the whole range (and the video
        // range) to the fixed max rate, per the table above.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to one of the discrete HFR modes.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size = sensor fps over preview fps, clamped to the max
                // the pipeline supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
8304
8305/*===========================================================================
8306 * FUNCTION   : translateToHalMetadata
8307 *
8308 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8309 *
8310 *
8311 * PARAMETERS :
8312 *   @request  : request sent from framework
8313 *
8314 *
8315 * RETURN     : success: NO_ERROR
8316 *              failure:
8317 *==========================================================================*/
8318int QCamera3HardwareInterface::translateToHalMetadata
8319                                  (const camera3_capture_request_t *request,
8320                                   metadata_buffer_t *hal_metadata,
8321                                   uint32_t snapshotStreamId)
8322{
8323    int rc = 0;
8324    CameraMetadata frame_settings;
8325    frame_settings = request->settings;
8326
8327    /* Do not change the order of the following list unless you know what you are
8328     * doing.
8329     * The order is laid out in such a way that parameters in the front of the table
8330     * may be used to override the parameters later in the table. Examples are:
8331     * 1. META_MODE should precede AEC/AWB/AF MODE
8332     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
8333     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
8334     * 4. Any mode should precede it's corresponding settings
8335     */
8336    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8337        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8338        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8339            rc = BAD_VALUE;
8340        }
8341        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8342        if (rc != NO_ERROR) {
8343            LOGE("extractSceneMode failed");
8344        }
8345    }
8346
8347    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8348        uint8_t fwk_aeMode =
8349            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8350        uint8_t aeMode;
8351        int32_t redeye;
8352
8353        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8354            aeMode = CAM_AE_MODE_OFF;
8355        } else {
8356            aeMode = CAM_AE_MODE_ON;
8357        }
8358        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8359            redeye = 1;
8360        } else {
8361            redeye = 0;
8362        }
8363
8364        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8365                fwk_aeMode);
8366        if (NAME_NOT_FOUND != val) {
8367            int32_t flashMode = (int32_t)val;
8368            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8369        }
8370
8371        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8372        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8373            rc = BAD_VALUE;
8374        }
8375    }
8376
8377    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8378        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8379        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8380                fwk_whiteLevel);
8381        if (NAME_NOT_FOUND != val) {
8382            uint8_t whiteLevel = (uint8_t)val;
8383            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8384                rc = BAD_VALUE;
8385            }
8386        }
8387    }
8388
8389    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8390        uint8_t fwk_cacMode =
8391                frame_settings.find(
8392                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8393        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8394                fwk_cacMode);
8395        if (NAME_NOT_FOUND != val) {
8396            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8397            bool entryAvailable = FALSE;
8398            // Check whether Frameworks set CAC mode is supported in device or not
8399            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8400                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
8401                    entryAvailable = TRUE;
8402                    break;
8403                }
8404            }
8405            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
8406            // If entry not found then set the device supported mode instead of frameworks mode i.e,
8407            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
8408            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
8409            if (entryAvailable == FALSE) {
8410                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8411                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8412                } else {
8413                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8414                        // High is not supported and so set the FAST as spec say's underlying
8415                        // device implementation can be the same for both modes.
8416                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
8417                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8418                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
8419                        // in order to avoid the fps drop due to high quality
8420                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8421                    } else {
8422                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8423                    }
8424                }
8425            }
8426            LOGD("Final cacMode is %d", cacMode);
8427            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8428                rc = BAD_VALUE;
8429            }
8430        } else {
8431            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
8432        }
8433    }
8434
8435    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8436        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8437        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8438                fwk_focusMode);
8439        if (NAME_NOT_FOUND != val) {
8440            uint8_t focusMode = (uint8_t)val;
8441            LOGD("set focus mode %d", focusMode);
8442            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8443                rc = BAD_VALUE;
8444            }
8445        }
8446    }
8447
8448    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8449        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8450        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8451                focalDistance)) {
8452            rc = BAD_VALUE;
8453        }
8454    }
8455
8456    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8457        uint8_t fwk_antibandingMode =
8458                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8459        int val = lookupHalName(ANTIBANDING_MODES_MAP,
8460                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8461        if (NAME_NOT_FOUND != val) {
8462            uint32_t hal_antibandingMode = (uint32_t)val;
8463            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8464                    hal_antibandingMode)) {
8465                rc = BAD_VALUE;
8466            }
8467        }
8468    }
8469
8470    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8471        int32_t expCompensation = frame_settings.find(
8472                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8473        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8474            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8475        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8476            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8477        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8478                expCompensation)) {
8479            rc = BAD_VALUE;
8480        }
8481    }
8482
8483    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8484        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8485        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8486            rc = BAD_VALUE;
8487        }
8488    }
8489    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8490        rc = setHalFpsRange(frame_settings, hal_metadata);
8491        if (rc != NO_ERROR) {
8492            LOGE("setHalFpsRange failed");
8493        }
8494    }
8495
8496    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8497        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8498        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8499            rc = BAD_VALUE;
8500        }
8501    }
8502
8503    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8504        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8505        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8506                fwk_effectMode);
8507        if (NAME_NOT_FOUND != val) {
8508            uint8_t effectMode = (uint8_t)val;
8509            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8510                rc = BAD_VALUE;
8511            }
8512        }
8513    }
8514
8515    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8516        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8517        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8518                colorCorrectMode)) {
8519            rc = BAD_VALUE;
8520        }
8521    }
8522
8523    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8524        cam_color_correct_gains_t colorCorrectGains;
8525        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
8526            colorCorrectGains.gains[i] =
8527                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8528        }
8529        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8530                colorCorrectGains)) {
8531            rc = BAD_VALUE;
8532        }
8533    }
8534
8535    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8536        cam_color_correct_matrix_t colorCorrectTransform;
8537        cam_rational_type_t transform_elem;
8538        size_t num = 0;
8539        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8540           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8541              transform_elem.numerator =
8542                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8543              transform_elem.denominator =
8544                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8545              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8546              num++;
8547           }
8548        }
8549        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8550                colorCorrectTransform)) {
8551            rc = BAD_VALUE;
8552        }
8553    }
8554
8555    cam_trigger_t aecTrigger;
8556    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8557    aecTrigger.trigger_id = -1;
8558    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8559        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8560        aecTrigger.trigger =
8561            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8562        aecTrigger.trigger_id =
8563            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8564        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8565                aecTrigger)) {
8566            rc = BAD_VALUE;
8567        }
8568        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
8569                aecTrigger.trigger, aecTrigger.trigger_id);
8570    }
8571
8572    /*af_trigger must come with a trigger id*/
8573    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8574        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8575        cam_trigger_t af_trigger;
8576        af_trigger.trigger =
8577            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8578        af_trigger.trigger_id =
8579            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8580        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8581            rc = BAD_VALUE;
8582        }
8583        LOGD("AfTrigger: %d AfTriggerID: %d",
8584                af_trigger.trigger, af_trigger.trigger_id);
8585    }
8586
8587    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8588        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8589        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8590            rc = BAD_VALUE;
8591        }
8592    }
8593    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8594        cam_edge_application_t edge_application;
8595        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8596        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8597            edge_application.sharpness = 0;
8598        } else {
8599            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8600        }
8601        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8602            rc = BAD_VALUE;
8603        }
8604    }
8605
8606    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8607        int32_t respectFlashMode = 1;
8608        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8609            uint8_t fwk_aeMode =
8610                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8611            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8612                respectFlashMode = 0;
8613                LOGH("AE Mode controls flash, ignore android.flash.mode");
8614            }
8615        }
8616        if (respectFlashMode) {
8617            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8618                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8619            LOGH("flash mode after mapping %d", val);
8620            // To check: CAM_INTF_META_FLASH_MODE usage
8621            if (NAME_NOT_FOUND != val) {
8622                uint8_t flashMode = (uint8_t)val;
8623                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8624                    rc = BAD_VALUE;
8625                }
8626            }
8627        }
8628    }
8629
8630    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8631        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8632        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8633            rc = BAD_VALUE;
8634        }
8635    }
8636
8637    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8638        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8639        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8640                flashFiringTime)) {
8641            rc = BAD_VALUE;
8642        }
8643    }
8644
8645    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8646        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8647        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8648                hotPixelMode)) {
8649            rc = BAD_VALUE;
8650        }
8651    }
8652
8653    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8654        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8655        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8656                lensAperture)) {
8657            rc = BAD_VALUE;
8658        }
8659    }
8660
8661    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8662        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8663        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8664                filterDensity)) {
8665            rc = BAD_VALUE;
8666        }
8667    }
8668
8669    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8670        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8671        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8672                focalLength)) {
8673            rc = BAD_VALUE;
8674        }
8675    }
8676
8677    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8678        uint8_t optStabMode =
8679                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8680        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8681                optStabMode)) {
8682            rc = BAD_VALUE;
8683        }
8684    }
8685
8686    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8687        uint8_t videoStabMode =
8688                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8689        LOGD("videoStabMode from APP = %d", videoStabMode);
8690        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8691                videoStabMode)) {
8692            rc = BAD_VALUE;
8693        }
8694    }
8695
8696
8697    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8698        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8699        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8700                noiseRedMode)) {
8701            rc = BAD_VALUE;
8702        }
8703    }
8704
8705    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8706        float reprocessEffectiveExposureFactor =
8707            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8708        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8709                reprocessEffectiveExposureFactor)) {
8710            rc = BAD_VALUE;
8711        }
8712    }
8713
8714    cam_crop_region_t scalerCropRegion;
8715    bool scalerCropSet = false;
8716    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8717        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8718        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8719        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8720        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8721
8722        // Map coordinate system from active array to sensor output.
8723        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8724                scalerCropRegion.width, scalerCropRegion.height);
8725
8726        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8727                scalerCropRegion)) {
8728            rc = BAD_VALUE;
8729        }
8730        scalerCropSet = true;
8731    }
8732
8733    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8734        int64_t sensorExpTime =
8735                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8736        LOGD("setting sensorExpTime %lld", sensorExpTime);
8737        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8738                sensorExpTime)) {
8739            rc = BAD_VALUE;
8740        }
8741    }
8742
8743    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8744        int64_t sensorFrameDuration =
8745                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8746        int64_t minFrameDuration = getMinFrameDuration(request);
8747        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8748        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8749            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8750        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
8751        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8752                sensorFrameDuration)) {
8753            rc = BAD_VALUE;
8754        }
8755    }
8756
8757    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8758        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8759        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8760                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8761        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8762                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8763        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
8764        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8765                sensorSensitivity)) {
8766            rc = BAD_VALUE;
8767        }
8768    }
8769
8770    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
8771        int32_t ispSensitivity =
8772            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
8773        if (ispSensitivity <
8774            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
8775                ispSensitivity =
8776                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8777                LOGD("clamp ispSensitivity to %d", ispSensitivity);
8778        }
8779        if (ispSensitivity >
8780            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
8781                ispSensitivity =
8782                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
8783                LOGD("clamp ispSensitivity to %d", ispSensitivity);
8784        }
8785        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
8786                ispSensitivity)) {
8787            rc = BAD_VALUE;
8788        }
8789    }
8790
8791    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8792        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8793        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8794            rc = BAD_VALUE;
8795        }
8796    }
8797
8798    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8799        uint8_t fwk_facedetectMode =
8800                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8801
8802        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8803                fwk_facedetectMode);
8804
8805        if (NAME_NOT_FOUND != val) {
8806            uint8_t facedetectMode = (uint8_t)val;
8807            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8808                    facedetectMode)) {
8809                rc = BAD_VALUE;
8810            }
8811        }
8812    }
8813
8814    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8815        uint8_t histogramMode =
8816                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8817        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8818                histogramMode)) {
8819            rc = BAD_VALUE;
8820        }
8821    }
8822
8823    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8824        uint8_t sharpnessMapMode =
8825                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8826        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8827                sharpnessMapMode)) {
8828            rc = BAD_VALUE;
8829        }
8830    }
8831
8832    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8833        uint8_t tonemapMode =
8834                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8835        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8836            rc = BAD_VALUE;
8837        }
8838    }
8839    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8840    /*All tonemap channels will have the same number of points*/
8841    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8842        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8843        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8844        cam_rgb_tonemap_curves tonemapCurves;
8845        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8846        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8847            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
8848                     tonemapCurves.tonemap_points_cnt,
8849                    CAM_MAX_TONEMAP_CURVE_SIZE);
8850            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8851        }
8852
8853        /* ch0 = G*/
8854        size_t point = 0;
8855        cam_tonemap_curve_t tonemapCurveGreen;
8856        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8857            for (size_t j = 0; j < 2; j++) {
8858               tonemapCurveGreen.tonemap_points[i][j] =
8859                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8860               point++;
8861            }
8862        }
8863        tonemapCurves.curves[0] = tonemapCurveGreen;
8864
8865        /* ch 1 = B */
8866        point = 0;
8867        cam_tonemap_curve_t tonemapCurveBlue;
8868        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8869            for (size_t j = 0; j < 2; j++) {
8870               tonemapCurveBlue.tonemap_points[i][j] =
8871                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8872               point++;
8873            }
8874        }
8875        tonemapCurves.curves[1] = tonemapCurveBlue;
8876
8877        /* ch 2 = R */
8878        point = 0;
8879        cam_tonemap_curve_t tonemapCurveRed;
8880        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8881            for (size_t j = 0; j < 2; j++) {
8882               tonemapCurveRed.tonemap_points[i][j] =
8883                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8884               point++;
8885            }
8886        }
8887        tonemapCurves.curves[2] = tonemapCurveRed;
8888
8889        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8890                tonemapCurves)) {
8891            rc = BAD_VALUE;
8892        }
8893    }
8894
8895    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8896        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8897        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8898                captureIntent)) {
8899            rc = BAD_VALUE;
8900        }
8901    }
8902
8903    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8904        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8905        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8906                blackLevelLock)) {
8907            rc = BAD_VALUE;
8908        }
8909    }
8910
8911    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8912        uint8_t lensShadingMapMode =
8913                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8914        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8915                lensShadingMapMode)) {
8916            rc = BAD_VALUE;
8917        }
8918    }
8919
8920    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8921        cam_area_t roi;
8922        bool reset = true;
8923        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8924
8925        // Map coordinate system from active array to sensor output.
8926        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8927                roi.rect.height);
8928
8929        if (scalerCropSet) {
8930            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8931        }
8932        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8933            rc = BAD_VALUE;
8934        }
8935    }
8936
8937    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8938        cam_area_t roi;
8939        bool reset = true;
8940        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8941
8942        // Map coordinate system from active array to sensor output.
8943        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8944                roi.rect.height);
8945
8946        if (scalerCropSet) {
8947            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8948        }
8949        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8950            rc = BAD_VALUE;
8951        }
8952    }
8953
8954    // CDS for non-HFR non-video mode
8955    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8956            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
8957        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8958        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8959            LOGE("Invalid CDS mode %d!", *fwk_cds);
8960        } else {
8961            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8962                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8963                rc = BAD_VALUE;
8964            }
8965        }
8966    }
8967
8968    // TNR
8969    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8970        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8971        uint8_t b_TnrRequested = 0;
8972        cam_denoise_param_t tnr;
8973        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8974        tnr.process_plates =
8975            (cam_denoise_process_type_t)frame_settings.find(
8976            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8977        b_TnrRequested = tnr.denoise_enable;
8978        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8979            rc = BAD_VALUE;
8980        }
8981    }
8982
8983    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8984        int32_t fwk_testPatternMode =
8985                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8986        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8987                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8988
8989        if (NAME_NOT_FOUND != testPatternMode) {
8990            cam_test_pattern_data_t testPatternData;
8991            memset(&testPatternData, 0, sizeof(testPatternData));
8992            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8993            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8994                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8995                int32_t *fwk_testPatternData =
8996                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8997                testPatternData.r = fwk_testPatternData[0];
8998                testPatternData.b = fwk_testPatternData[3];
8999                switch (gCamCapability[mCameraId]->color_arrangement) {
9000                    case CAM_FILTER_ARRANGEMENT_RGGB:
9001                    case CAM_FILTER_ARRANGEMENT_GRBG:
9002                        testPatternData.gr = fwk_testPatternData[1];
9003                        testPatternData.gb = fwk_testPatternData[2];
9004                        break;
9005                    case CAM_FILTER_ARRANGEMENT_GBRG:
9006                    case CAM_FILTER_ARRANGEMENT_BGGR:
9007                        testPatternData.gr = fwk_testPatternData[2];
9008                        testPatternData.gb = fwk_testPatternData[1];
9009                        break;
9010                    default:
9011                        LOGE("color arrangement %d is not supported",
9012                                gCamCapability[mCameraId]->color_arrangement);
9013                        break;
9014                }
9015            }
9016            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9017                    testPatternData)) {
9018                rc = BAD_VALUE;
9019            }
9020        } else {
9021            LOGE("Invalid framework sensor test pattern mode %d",
9022                    fwk_testPatternMode);
9023        }
9024    }
9025
9026    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9027        size_t count = 0;
9028        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9029        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9030                gps_coords.data.d, gps_coords.count, count);
9031        if (gps_coords.count != count) {
9032            rc = BAD_VALUE;
9033        }
9034    }
9035
9036    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9037        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9038        size_t count = 0;
9039        const char *gps_methods_src = (const char *)
9040                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9041        memset(gps_methods, '\0', sizeof(gps_methods));
9042        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9043        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9044                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9045        if (GPS_PROCESSING_METHOD_SIZE != count) {
9046            rc = BAD_VALUE;
9047        }
9048    }
9049
9050    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9051        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9052        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9053                gps_timestamp)) {
9054            rc = BAD_VALUE;
9055        }
9056    }
9057
9058    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9059        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9060        cam_rotation_info_t rotation_info;
9061        if (orientation == 0) {
9062           rotation_info.rotation = ROTATE_0;
9063        } else if (orientation == 90) {
9064           rotation_info.rotation = ROTATE_90;
9065        } else if (orientation == 180) {
9066           rotation_info.rotation = ROTATE_180;
9067        } else if (orientation == 270) {
9068           rotation_info.rotation = ROTATE_270;
9069        }
9070        rotation_info.streamId = snapshotStreamId;
9071        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9072        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9073            rc = BAD_VALUE;
9074        }
9075    }
9076
9077    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9078        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9079        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9080            rc = BAD_VALUE;
9081        }
9082    }
9083
9084    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9085        uint32_t thumb_quality = (uint32_t)
9086                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9087        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9088                thumb_quality)) {
9089            rc = BAD_VALUE;
9090        }
9091    }
9092
9093    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9094        cam_dimension_t dim;
9095        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9096        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9097        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9098            rc = BAD_VALUE;
9099        }
9100    }
9101
9102    // Internal metadata
9103    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9104        size_t count = 0;
9105        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9106        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9107                privatedata.data.i32, privatedata.count, count);
9108        if (privatedata.count != count) {
9109            rc = BAD_VALUE;
9110        }
9111    }
9112
9113    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
9114        uint8_t* use_av_timer =
9115                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
9116        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
9117            rc = BAD_VALUE;
9118        }
9119    }
9120
9121    // EV step
9122    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9123            gCamCapability[mCameraId]->exp_compensation_step)) {
9124        rc = BAD_VALUE;
9125    }
9126
9127    // CDS info
9128    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9129        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9130                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9131
9132        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9133                CAM_INTF_META_CDS_DATA, *cdsData)) {
9134            rc = BAD_VALUE;
9135        }
9136    }
9137
9138    return rc;
9139}
9140
9141/*===========================================================================
9142 * FUNCTION   : captureResultCb
9143 *
9144 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9145 *
9146 * PARAMETERS :
9147 *   @frame  : frame information from mm-camera-interface
9148 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9149 *   @userdata: userdata
9150 *
9151 * RETURN     : NONE
9152 *==========================================================================*/
9153void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9154                camera3_stream_buffer_t *buffer,
9155                uint32_t frame_number, bool isInputBuffer, void *userdata)
9156{
9157    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9158    if (hw == NULL) {
9159        LOGE("Invalid hw %p", hw);
9160        return;
9161    }
9162
9163    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9164    return;
9165}
9166
9167
9168/*===========================================================================
9169 * FUNCTION   : initialize
9170 *
9171 * DESCRIPTION: Pass framework callback pointers to HAL
9172 *
9173 * PARAMETERS :
9174 *
9175 *
9176 * RETURN     : Success : 0
9177 *              Failure: -ENODEV
9178 *==========================================================================*/
9179
9180int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9181                                  const camera3_callback_ops_t *callback_ops)
9182{
9183    LOGD("E");
9184    QCamera3HardwareInterface *hw =
9185        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9186    if (!hw) {
9187        LOGE("NULL camera device");
9188        return -ENODEV;
9189    }
9190
9191    int rc = hw->initialize(callback_ops);
9192    LOGD("X");
9193    return rc;
9194}
9195
9196/*===========================================================================
9197 * FUNCTION   : configure_streams
9198 *
9199 * DESCRIPTION:
9200 *
9201 * PARAMETERS :
9202 *
9203 *
9204 * RETURN     : Success: 0
9205 *              Failure: -EINVAL (if stream configuration is invalid)
9206 *                       -ENODEV (fatal error)
9207 *==========================================================================*/
9208
9209int QCamera3HardwareInterface::configure_streams(
9210        const struct camera3_device *device,
9211        camera3_stream_configuration_t *stream_list)
9212{
9213    LOGD("E");
9214    QCamera3HardwareInterface *hw =
9215        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9216    if (!hw) {
9217        LOGE("NULL camera device");
9218        return -ENODEV;
9219    }
9220    int rc = hw->configureStreams(stream_list);
9221    LOGD("X");
9222    return rc;
9223}
9224
9225/*===========================================================================
9226 * FUNCTION   : construct_default_request_settings
9227 *
9228 * DESCRIPTION: Configure a settings buffer to meet the required use case
9229 *
9230 * PARAMETERS :
9231 *
9232 *
9233 * RETURN     : Success: Return valid metadata
9234 *              Failure: Return NULL
9235 *==========================================================================*/
9236const camera_metadata_t* QCamera3HardwareInterface::
9237    construct_default_request_settings(const struct camera3_device *device,
9238                                        int type)
9239{
9240
9241    LOGD("E");
9242    camera_metadata_t* fwk_metadata = NULL;
9243    QCamera3HardwareInterface *hw =
9244        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9245    if (!hw) {
9246        LOGE("NULL camera device");
9247        return NULL;
9248    }
9249
9250    fwk_metadata = hw->translateCapabilityToMetadata(type);
9251
9252    LOGD("X");
9253    return fwk_metadata;
9254}
9255
9256/*===========================================================================
9257 * FUNCTION   : process_capture_request
9258 *
9259 * DESCRIPTION:
9260 *
9261 * PARAMETERS :
9262 *
9263 *
9264 * RETURN     :
9265 *==========================================================================*/
9266int QCamera3HardwareInterface::process_capture_request(
9267                    const struct camera3_device *device,
9268                    camera3_capture_request_t *request)
9269{
9270    LOGD("E");
9271    QCamera3HardwareInterface *hw =
9272        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9273    if (!hw) {
9274        LOGE("NULL camera device");
9275        return -EINVAL;
9276    }
9277
9278    int rc = hw->processCaptureRequest(request);
9279    LOGD("X");
9280    return rc;
9281}
9282
9283/*===========================================================================
9284 * FUNCTION   : dump
9285 *
9286 * DESCRIPTION:
9287 *
9288 * PARAMETERS :
9289 *
9290 *
9291 * RETURN     :
9292 *==========================================================================*/
9293
9294void QCamera3HardwareInterface::dump(
9295                const struct camera3_device *device, int fd)
9296{
9297    /* Log level property is read when "adb shell dumpsys media.camera" is
9298       called so that the log level can be controlled without restarting
9299       the media server */
9300    getLogLevel();
9301
9302    LOGD("E");
9303    QCamera3HardwareInterface *hw =
9304        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9305    if (!hw) {
9306        LOGE("NULL camera device");
9307        return;
9308    }
9309
9310    hw->dump(fd);
9311    LOGD("X");
9312    return;
9313}
9314
9315/*===========================================================================
9316 * FUNCTION   : flush
9317 *
9318 * DESCRIPTION:
9319 *
9320 * PARAMETERS :
9321 *
9322 *
9323 * RETURN     :
9324 *==========================================================================*/
9325
9326int QCamera3HardwareInterface::flush(
9327                const struct camera3_device *device)
9328{
9329    int rc;
9330    LOGD("E");
9331    QCamera3HardwareInterface *hw =
9332        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9333    if (!hw) {
9334        LOGE("NULL camera device");
9335        return -EINVAL;
9336    }
9337
9338    pthread_mutex_lock(&hw->mMutex);
9339    // Validate current state
9340    switch (hw->mState) {
9341        case STARTED:
9342            /* valid state */
9343            break;
9344
9345        case ERROR:
9346            pthread_mutex_unlock(&hw->mMutex);
9347            hw->handleCameraDeviceError();
9348            return -ENODEV;
9349
9350        default:
9351            LOGI("Flush returned during state %d", hw->mState);
9352            pthread_mutex_unlock(&hw->mMutex);
9353            return 0;
9354    }
9355    pthread_mutex_unlock(&hw->mMutex);
9356
9357    rc = hw->flush(true /* restart channels */ );
9358    LOGD("X");
9359    return rc;
9360}
9361
9362/*===========================================================================
9363 * FUNCTION   : close_camera_device
9364 *
9365 * DESCRIPTION:
9366 *
9367 * PARAMETERS :
9368 *
9369 *
9370 * RETURN     :
9371 *==========================================================================*/
9372int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9373{
9374    int ret = NO_ERROR;
9375    QCamera3HardwareInterface *hw =
9376        reinterpret_cast<QCamera3HardwareInterface *>(
9377            reinterpret_cast<camera3_device_t *>(device)->priv);
9378    if (!hw) {
9379        LOGE("NULL camera device");
9380        return BAD_VALUE;
9381    }
9382
9383    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
9384    delete hw;
9385    LOGI("[KPI Perf]: X");
9386    return ret;
9387}
9388
9389/*===========================================================================
9390 * FUNCTION   : getWaveletDenoiseProcessPlate
9391 *
9392 * DESCRIPTION: query wavelet denoise process plate
9393 *
9394 * PARAMETERS : None
9395 *
9396 * RETURN     : WNR prcocess plate value
9397 *==========================================================================*/
9398cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9399{
9400    char prop[PROPERTY_VALUE_MAX];
9401    memset(prop, 0, sizeof(prop));
9402    property_get("persist.denoise.process.plates", prop, "0");
9403    int processPlate = atoi(prop);
9404    switch(processPlate) {
9405    case 0:
9406        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9407    case 1:
9408        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9409    case 2:
9410        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9411    case 3:
9412        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9413    default:
9414        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9415    }
9416}
9417
9418
9419/*===========================================================================
9420 * FUNCTION   : getTemporalDenoiseProcessPlate
9421 *
9422 * DESCRIPTION: query temporal denoise process plate
9423 *
9424 * PARAMETERS : None
9425 *
9426 * RETURN     : TNR prcocess plate value
9427 *==========================================================================*/
9428cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9429{
9430    char prop[PROPERTY_VALUE_MAX];
9431    memset(prop, 0, sizeof(prop));
9432    property_get("persist.tnr.process.plates", prop, "0");
9433    int processPlate = atoi(prop);
9434    switch(processPlate) {
9435    case 0:
9436        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9437    case 1:
9438        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9439    case 2:
9440        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9441    case 3:
9442        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9443    default:
9444        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9445    }
9446}
9447
9448
9449/*===========================================================================
9450 * FUNCTION   : extractSceneMode
9451 *
9452 * DESCRIPTION: Extract scene mode from frameworks set metadata
9453 *
9454 * PARAMETERS :
9455 *      @frame_settings: CameraMetadata reference
9456 *      @metaMode: ANDROID_CONTORL_MODE
9457 *      @hal_metadata: hal metadata structure
9458 *
9459 * RETURN     : None
9460 *==========================================================================*/
9461int32_t QCamera3HardwareInterface::extractSceneMode(
9462        const CameraMetadata &frame_settings, uint8_t metaMode,
9463        metadata_buffer_t *hal_metadata)
9464{
9465    int32_t rc = NO_ERROR;
9466
9467    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9468        camera_metadata_ro_entry entry =
9469                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9470        if (0 == entry.count)
9471            return rc;
9472
9473        uint8_t fwk_sceneMode = entry.data.u8[0];
9474
9475        int val = lookupHalName(SCENE_MODES_MAP,
9476                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9477                fwk_sceneMode);
9478        if (NAME_NOT_FOUND != val) {
9479            uint8_t sceneMode = (uint8_t)val;
9480            LOGD("sceneMode: %d", sceneMode);
9481            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9482                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9483                rc = BAD_VALUE;
9484            }
9485        }
9486    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9487            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9488        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9489        LOGD("sceneMode: %d", sceneMode);
9490        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9491                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9492            rc = BAD_VALUE;
9493        }
9494    }
9495    return rc;
9496}
9497
9498/*===========================================================================
9499 * FUNCTION   : needRotationReprocess
9500 *
9501 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9502 *
9503 * PARAMETERS : none
9504 *
9505 * RETURN     : true: needed
9506 *              false: no need
9507 *==========================================================================*/
9508bool QCamera3HardwareInterface::needRotationReprocess()
9509{
9510    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9511        // current rotation is not zero, and pp has the capability to process rotation
9512        LOGH("need do reprocess for rotation");
9513        return true;
9514    }
9515
9516    return false;
9517}
9518
9519/*===========================================================================
9520 * FUNCTION   : needReprocess
9521 *
9522 * DESCRIPTION: if reprocess in needed
9523 *
9524 * PARAMETERS : none
9525 *
9526 * RETURN     : true: needed
9527 *              false: no need
9528 *==========================================================================*/
9529bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9530{
9531    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
9532        // TODO: add for ZSL HDR later
9533        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9534        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9535            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
9536            return true;
9537        } else {
9538            LOGH("already post processed frame");
9539            return false;
9540        }
9541    }
9542    return needRotationReprocess();
9543}
9544
9545/*===========================================================================
9546 * FUNCTION   : needJpegExifRotation
9547 *
9548 * DESCRIPTION: if rotation from jpeg is needed
9549 *
9550 * PARAMETERS : none
9551 *
9552 * RETURN     : true: needed
9553 *              false: no need
9554 *==========================================================================*/
9555bool QCamera3HardwareInterface::needJpegExifRotation()
9556{
9557   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9558    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9559       LOGD("Need use Jpeg EXIF Rotation");
9560       return true;
9561    }
9562    return false;
9563}
9564
9565/*===========================================================================
9566 * FUNCTION   : addOfflineReprocChannel
9567 *
9568 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9569 *              coming from input channel
9570 *
9571 * PARAMETERS :
9572 *   @config  : reprocess configuration
9573 *   @inputChHandle : pointer to the input (source) channel
9574 *
9575 *
9576 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
9577 *==========================================================================*/
9578QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9579        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9580{
9581    int32_t rc = NO_ERROR;
9582    QCamera3ReprocessChannel *pChannel = NULL;
9583
9584    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9585            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9586            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9587    if (NULL == pChannel) {
9588        LOGE("no mem for reprocess channel");
9589        return NULL;
9590    }
9591
9592    rc = pChannel->initialize(IS_TYPE_NONE);
9593    if (rc != NO_ERROR) {
9594        LOGE("init reprocess channel failed, ret = %d", rc);
9595        delete pChannel;
9596        return NULL;
9597    }
9598
9599    // pp feature config
9600    cam_pp_feature_config_t pp_config;
9601    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9602
9603    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9604    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
9605            & CAM_QCOM_FEATURE_DSDN) {
9606        //Use CPP CDS incase h/w supports it.
9607        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
9608        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
9609    }
9610    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9611        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
9612    }
9613
9614    rc = pChannel->addReprocStreamsFromSource(pp_config,
9615            config,
9616            IS_TYPE_NONE,
9617            mMetadataChannel);
9618
9619    if (rc != NO_ERROR) {
9620        delete pChannel;
9621        return NULL;
9622    }
9623    return pChannel;
9624}
9625
9626/*===========================================================================
9627 * FUNCTION   : getMobicatMask
9628 *
9629 * DESCRIPTION: returns mobicat mask
9630 *
9631 * PARAMETERS : none
9632 *
9633 * RETURN     : mobicat mask
9634 *
9635 *==========================================================================*/
// Trivial accessor: returns the mobicat enable mask cached by setMobicat().
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    return m_MobicatMask;
}
9640
9641/*===========================================================================
9642 * FUNCTION   : setMobicat
9643 *
9644 * DESCRIPTION: set Mobicat on/off.
9645 *
9646 * PARAMETERS :
9647 *   @params  : none
9648 *
9649 * RETURN     : int32_t type of status
9650 *              NO_ERROR  -- success
9651 *              none-zero failure code
9652 *==========================================================================*/
9653int32_t QCamera3HardwareInterface::setMobicat()
9654{
9655    char value [PROPERTY_VALUE_MAX];
9656    property_get("persist.camera.mobicat", value, "0");
9657    int32_t ret = NO_ERROR;
9658    uint8_t enableMobi = (uint8_t)atoi(value);
9659
9660    if (enableMobi) {
9661        tune_cmd_t tune_cmd;
9662        tune_cmd.type = SET_RELOAD_CHROMATIX;
9663        tune_cmd.module = MODULE_ALL;
9664        tune_cmd.value = TRUE;
9665        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9666                CAM_INTF_PARM_SET_VFE_COMMAND,
9667                tune_cmd);
9668
9669        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9670                CAM_INTF_PARM_SET_PP_COMMAND,
9671                tune_cmd);
9672    }
9673    m_MobicatMask = enableMobi;
9674
9675    return ret;
9676}
9677
9678/*===========================================================================
9679* FUNCTION   : getLogLevel
9680*
9681* DESCRIPTION: Reads the log level property into a variable
9682*
9683* PARAMETERS :
9684*   None
9685*
9686* RETURN     :
9687*   None
9688*==========================================================================*/
9689void QCamera3HardwareInterface::getLogLevel()
9690{
9691    char prop[PROPERTY_VALUE_MAX];
9692    uint32_t globalLogLevel = 0;
9693
9694    property_get("persist.camera.hal.debug", prop, "0");
9695    int val = atoi(prop);
9696    if (0 <= val) {
9697        gCamHal3LogLevel = (uint32_t)val;
9698    }
9699
9700    property_get("persist.camera.kpi.debug", prop, "1");
9701    gKpiDebugLevel = atoi(prop);
9702
9703    property_get("persist.camera.global.debug", prop, "0");
9704    val = atoi(prop);
9705    if (0 <= val) {
9706        globalLogLevel = (uint32_t)val;
9707    }
9708
9709    /* Highest log level among hal.logs and global.logs is selected */
9710    if (gCamHal3LogLevel < globalLogLevel)
9711        gCamHal3LogLevel = globalLogLevel;
9712
9713    return;
9714}
9715
9716/*===========================================================================
9717 * FUNCTION   : validateStreamRotations
9718 *
9719 * DESCRIPTION: Check if the rotations requested are supported
9720 *
9721 * PARAMETERS :
9722 *   @stream_list : streams to be configured
9723 *
9724 * RETURN     : NO_ERROR on success
9725 *              -EINVAL on failure
9726 *
9727 *==========================================================================*/
9728int QCamera3HardwareInterface::validateStreamRotations(
9729        camera3_stream_configuration_t *streamList)
9730{
9731    int rc = NO_ERROR;
9732
9733    /*
9734    * Loop through all streams requested in configuration
9735    * Check if unsupported rotations have been requested on any of them
9736    */
9737    for (size_t j = 0; j < streamList->num_streams; j++){
9738        camera3_stream_t *newStream = streamList->streams[j];
9739
9740        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9741        bool isImplDef = (newStream->format ==
9742                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9743        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9744                isImplDef);
9745
9746        if (isRotated && (!isImplDef || isZsl)) {
9747            LOGE("Error: Unsupported rotation of %d requested for stream"
9748                    "type:%d and stream format:%d",
9749                    newStream->rotation, newStream->stream_type,
9750                    newStream->format);
9751            rc = -EINVAL;
9752            break;
9753        }
9754    }
9755
9756    return rc;
9757}
9758
9759/*===========================================================================
9760* FUNCTION   : getFlashInfo
9761*
9762* DESCRIPTION: Retrieve information about whether the device has a flash.
9763*
9764* PARAMETERS :
9765*   @cameraId  : Camera id to query
9766*   @hasFlash  : Boolean indicating whether there is a flash device
9767*                associated with given camera
9768*   @flashNode : If a flash device exists, this will be its device node.
9769*
9770* RETURN     :
9771*   None
9772*==========================================================================*/
9773void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9774        bool& hasFlash,
9775        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9776{
9777    cam_capability_t* camCapability = gCamCapability[cameraId];
9778    if (NULL == camCapability) {
9779        hasFlash = false;
9780        flashNode[0] = '\0';
9781    } else {
9782        hasFlash = camCapability->flash_available;
9783        strlcpy(flashNode,
9784                (char*)camCapability->flash_dev_name,
9785                QCAMERA_MAX_FILEPATH_LENGTH);
9786    }
9787}
9788
9789/*===========================================================================
9790* FUNCTION   : getEepromVersionInfo
9791*
9792* DESCRIPTION: Retrieve version info of the sensor EEPROM data
9793*
9794* PARAMETERS : None
9795*
9796* RETURN     : string describing EEPROM version
9797*              "\0" if no such info available
9798*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into the capability table's version buffer (no
    // copy); per the header comment above, this is "\0" when the sensor
    // has no EEPROM version info.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
9803
9804/*===========================================================================
9805* FUNCTION   : getLdafCalib
9806*
9807* DESCRIPTION: Retrieve Laser AF calibration data
9808*
9809* PARAMETERS : None
9810*
9811* RETURN     : Two uint32_t describing laser AF calibration data
9812*              NULL if none is available.
9813*==========================================================================*/
9814const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9815{
9816    if (mLdafCalibExist) {
9817        return &mLdafCalib[0];
9818    } else {
9819        return NULL;
9820    }
9821}
9822
9823/*===========================================================================
9824 * FUNCTION   : dynamicUpdateMetaStreamInfo
9825 *
9826 * DESCRIPTION: This function:
9827 *             (1) stops all the channels
9828 *             (2) returns error on pending requests and buffers
9829 *             (3) sends metastream_info in setparams
9830 *             (4) starts all channels
9831 *             This is useful when sensor has to be restarted to apply any
9832 *             settings such as frame rate from a different sensor mode
9833 *
9834 * PARAMETERS : None
9835 *
9836 * RETURN     : NO_ERROR on success
9837 *              Error codes on failure
9838 *
9839 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off every channel so the sensor mode can change.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: fail all in-flight requests/buffers back to the framework;
    // they cannot be completed across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the stream configuration about to be re-sent.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: a set_parms failure is logged but deliberately not fatal —
    // the sensor simply keeps its previous mode and channels restart anyway.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 4: stream everything back on.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
9887
9888/*===========================================================================
9889 * FUNCTION   : stopAllChannels
9890 *
9891 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9892 *
9893 * PARAMETERS : None
9894 *
9895 * RETURN     : NO_ERROR on success
9896 *              Error codes on failure
9897 *
9898 *==========================================================================*/
9899int32_t QCamera3HardwareInterface::stopAllChannels()
9900{
9901    int32_t rc = NO_ERROR;
9902
9903    LOGD("Stopping all channels");
9904    // Stop the Streams/Channels
9905    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9906        it != mStreamInfo.end(); it++) {
9907        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9908        if (channel) {
9909            channel->stop();
9910        }
9911        (*it)->status = INVALID;
9912    }
9913
9914    if (mSupportChannel) {
9915        mSupportChannel->stop();
9916    }
9917    if (mAnalysisChannel) {
9918        mAnalysisChannel->stop();
9919    }
9920    if (mRawDumpChannel) {
9921        mRawDumpChannel->stop();
9922    }
9923    if (mMetadataChannel) {
9924        /* If content of mStreamInfo is not 0, there is metadata stream */
9925        mMetadataChannel->stop();
9926    }
9927
9928    LOGD("All channels stopped");
9929    return rc;
9930}
9931
9932/*===========================================================================
9933 * FUNCTION   : startAllChannels
9934 *
9935 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9936 *
9937 * PARAMETERS : None
9938 *
9939 * RETURN     : NO_ERROR on success
9940 *              Error codes on failure
9941 *
9942 *==========================================================================*/
9943int32_t QCamera3HardwareInterface::startAllChannels()
9944{
9945    int32_t rc = NO_ERROR;
9946
9947    LOGD("Start all channels ");
9948    // Start the Streams/Channels
9949    if (mMetadataChannel) {
9950        /* If content of mStreamInfo is not 0, there is metadata stream */
9951        rc = mMetadataChannel->start();
9952        if (rc < 0) {
9953            LOGE("META channel start failed");
9954            return rc;
9955        }
9956    }
9957    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9958        it != mStreamInfo.end(); it++) {
9959        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9960        if (channel) {
9961            rc = channel->start();
9962            if (rc < 0) {
9963                LOGE("channel start failed");
9964                return rc;
9965            }
9966        }
9967    }
9968    if (mAnalysisChannel) {
9969        mAnalysisChannel->start();
9970    }
9971    if (mSupportChannel) {
9972        rc = mSupportChannel->start();
9973        if (rc < 0) {
9974            LOGE("Support channel start failed");
9975            return rc;
9976        }
9977    }
9978    if (mRawDumpChannel) {
9979        rc = mRawDumpChannel->start();
9980        if (rc < 0) {
9981            LOGE("RAW dump channel start failed");
9982            return rc;
9983        }
9984    }
9985
9986    LOGD("All channels started");
9987    return rc;
9988}
9989
9990/*===========================================================================
9991 * FUNCTION   : notifyErrorForPendingRequests
9992 *
9993 * DESCRIPTION: This function sends error for all the pending requests/buffers
9994 *
9995 * PARAMETERS : None
9996 *
9997 * RETURN     : Error codes
9998 *              NO_ERROR on success
9999 *
10000 *==========================================================================*/
10001int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
10002{
10003    int32_t rc = NO_ERROR;
10004    unsigned int frameNum = 0;
10005    camera3_capture_result_t result;
10006    camera3_stream_buffer_t *pStream_Buf = NULL;
10007
10008    memset(&result, 0, sizeof(camera3_capture_result_t));
10009
10010    if (mPendingRequestsList.size() > 0) {
10011        pendingRequestIterator i = mPendingRequestsList.begin();
10012        frameNum = i->frame_number;
10013    } else {
10014        /* There might still be pending buffers even though there are
10015         no pending requests. Setting the frameNum to MAX so that
10016         all the buffers with smaller frame numbers are returned */
10017        frameNum = UINT_MAX;
10018    }
10019
10020    LOGH("Oldest frame num on mPendingRequestsList = %u",
10021       frameNum);
10022
10023    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
10024            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
10025
10026        if (req->frame_number < frameNum) {
10027            // Send Error notify to frameworks for each buffer for which
10028            // metadata buffer is already sent
10029            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
10030                req->frame_number, req->mPendingBufferList.size());
10031
10032            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10033            if (NULL == pStream_Buf) {
10034                LOGE("No memory for pending buffers array");
10035                return NO_MEMORY;
10036            }
10037            memset(pStream_Buf, 0,
10038                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10039            result.result = NULL;
10040            result.frame_number = req->frame_number;
10041            result.num_output_buffers = req->mPendingBufferList.size();
10042            result.output_buffers = pStream_Buf;
10043
10044            size_t index = 0;
10045            for (auto info = req->mPendingBufferList.begin();
10046                info != req->mPendingBufferList.end(); ) {
10047
10048                camera3_notify_msg_t notify_msg;
10049                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10050                notify_msg.type = CAMERA3_MSG_ERROR;
10051                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
10052                notify_msg.message.error.error_stream = info->stream;
10053                notify_msg.message.error.frame_number = req->frame_number;
10054                pStream_Buf[index].acquire_fence = -1;
10055                pStream_Buf[index].release_fence = -1;
10056                pStream_Buf[index].buffer = info->buffer;
10057                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10058                pStream_Buf[index].stream = info->stream;
10059                mCallbackOps->notify(mCallbackOps, &notify_msg);
10060                index++;
10061                // Remove buffer from list
10062                info = req->mPendingBufferList.erase(info);
10063            }
10064
10065            // Remove this request from Map
10066            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10067                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10068            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10069
10070            mCallbackOps->process_capture_result(mCallbackOps, &result);
10071
10072            delete [] pStream_Buf;
10073        } else {
10074
10075            // Go through the pending requests info and send error request to framework
10076            LOGE("Sending ERROR REQUEST for all pending requests");
10077            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
10078
10079            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);
10080
10081            // Send error notify to frameworks
10082            camera3_notify_msg_t notify_msg;
10083            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10084            notify_msg.type = CAMERA3_MSG_ERROR;
10085            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
10086            notify_msg.message.error.error_stream = NULL;
10087            notify_msg.message.error.frame_number = req->frame_number;
10088            mCallbackOps->notify(mCallbackOps, &notify_msg);
10089
10090            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10091            if (NULL == pStream_Buf) {
10092                LOGE("No memory for pending buffers array");
10093                return NO_MEMORY;
10094            }
10095            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10096
10097            result.result = NULL;
10098            result.frame_number = req->frame_number;
10099            result.input_buffer = i->input_buffer;
10100            result.num_output_buffers = req->mPendingBufferList.size();
10101            result.output_buffers = pStream_Buf;
10102
10103            size_t index = 0;
10104            for (auto info = req->mPendingBufferList.begin();
10105                info != req->mPendingBufferList.end(); ) {
10106                pStream_Buf[index].acquire_fence = -1;
10107                pStream_Buf[index].release_fence = -1;
10108                pStream_Buf[index].buffer = info->buffer;
10109                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10110                pStream_Buf[index].stream = info->stream;
10111                index++;
10112                // Remove buffer from list
10113                info = req->mPendingBufferList.erase(info);
10114            }
10115
10116            // Remove this request from Map
10117            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10118                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10119            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10120
10121            mCallbackOps->process_capture_result(mCallbackOps, &result);
10122            delete [] pStream_Buf;
10123            i = erasePendingRequest(i);
10124        }
10125    }
10126
10127    /* Reset pending frame Drop list and requests list */
10128    mPendingFrameDropList.clear();
10129
10130    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
10131        req.mPendingBufferList.clear();
10132    }
10133    mPendingBuffersMap.mPendingBuffersInRequest.clear();
10134    mPendingReprocessResultList.clear();
10135    LOGH("Cleared all the pending buffers ");
10136
10137    return rc;
10138}
10139
10140bool QCamera3HardwareInterface::isOnEncoder(
10141        const cam_dimension_t max_viewfinder_size,
10142        uint32_t width, uint32_t height)
10143{
10144    return (width > (uint32_t)max_viewfinder_size.width ||
10145            height > (uint32_t)max_viewfinder_size.height);
10146}
10147
10148/*===========================================================================
10149 * FUNCTION   : setBundleInfo
10150 *
10151 * DESCRIPTION: Set bundle info for all streams that are bundle.
10152 *
10153 * PARAMETERS : None
10154 *
10155 * RETURN     : NO_ERROR on success
10156 *              Error codes on failure
10157 *==========================================================================*/
10158int32_t QCamera3HardwareInterface::setBundleInfo()
10159{
10160    int32_t rc = NO_ERROR;
10161
10162    if (mChannelHandle) {
10163        cam_bundle_config_t bundleInfo;
10164        memset(&bundleInfo, 0, sizeof(bundleInfo));
10165        rc = mCameraHandle->ops->get_bundle_info(
10166                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10167        if (rc != NO_ERROR) {
10168            LOGE("get_bundle_info failed");
10169            return rc;
10170        }
10171        if (mAnalysisChannel) {
10172            mAnalysisChannel->setBundleInfo(bundleInfo);
10173        }
10174        if (mSupportChannel) {
10175            mSupportChannel->setBundleInfo(bundleInfo);
10176        }
10177        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10178                it != mStreamInfo.end(); it++) {
10179            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10180            channel->setBundleInfo(bundleInfo);
10181        }
10182        if (mRawDumpChannel) {
10183            mRawDumpChannel->setBundleInfo(bundleInfo);
10184        }
10185    }
10186
10187    return rc;
10188}
10189
10190/*===========================================================================
10191 * FUNCTION   : get_num_overall_buffers
10192 *
10193 * DESCRIPTION: Estimate number of pending buffers across all requests.
10194 *
10195 * PARAMETERS : None
10196 *
10197 * RETURN     : Number of overall pending buffers
10198 *
10199 *==========================================================================*/
10200uint32_t PendingBuffersMap::get_num_overall_buffers()
10201{
10202    uint32_t sum_buffers = 0;
10203    for (auto &req : mPendingBuffersInRequest) {
10204        sum_buffers += req.mPendingBufferList.size();
10205    }
10206    return sum_buffers;
10207}
10208
10209/*===========================================================================
10210 * FUNCTION   : removeBuf
10211 *
10212 * DESCRIPTION: Remove a matching buffer from tracker.
10213 *
10214 * PARAMETERS : @buffer: image buffer for the callback
10215 *
10216 * RETURN     : None
10217 *
10218 *==========================================================================*/
10219void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
10220{
10221    bool buffer_found = false;
10222    for (auto req = mPendingBuffersInRequest.begin();
10223            req != mPendingBuffersInRequest.end(); req++) {
10224        for (auto k = req->mPendingBufferList.begin();
10225                k != req->mPendingBufferList.end(); k++ ) {
10226            if (k->buffer == buffer) {
10227                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
10228                        req->frame_number, buffer);
10229                k = req->mPendingBufferList.erase(k);
10230                if (req->mPendingBufferList.empty()) {
10231                    // Remove this request from Map
10232                    req = mPendingBuffersInRequest.erase(req);
10233                }
10234                buffer_found = true;
10235                break;
10236            }
10237        }
10238        if (buffer_found) {
10239            break;
10240        }
10241    }
10242    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
10243            get_num_overall_buffers());
10244}
10245
10246}; //end namespace qcamera
10247