// QCamera3HWI.cpp -- revision 6174b7950f7fe2e593556e742c7c6ab68e1e57f8
/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
// Convenience accessor for the i-th buffer pointer inside a heap-memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum representable sample values for the common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD frame dimensions, and the largest frame size on which EIS
// (electronic image stabilization) is considered.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream-count limits.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32s describing one metering region per the Android
// control-region metadata layout (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized lookup table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features requested for HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Wait indefinitely (no timeout).
#define TIMEOUT_NEVER -1
107
// Per-sensor capability tables and the cached static metadata handed to the
// framework, shared across all HAL instances (indexed by camera id).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL-wide log verbosity; presumably refreshed via getLogLevel() — confirm.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
113
// The tables below translate between Android camera_metadata enum values and
// the corresponding mm-camera backend enum values.  Lookups traverse them
// from lower to higher index (see the note before REFERENCE_ILLUMINANT_MAP),
// with METADATA_MAP_SIZE() as the bound.

// CDS (chroma down-sampling) property string -> backend mode.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Color effect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Auto-white-balance modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Scene modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Auto-focus modes.  Note both CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED
// report to the framework as ANDROID_CONTROL_AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Chromatic aberration correction (CAC) modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// AE antibanding (flicker compensation) modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> implied flash behavior (AE_MODE_ON/OFF imply flash off).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Explicit flash control modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Face detection modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Focus distance calibration quality.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Lens moving/stationary state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry presumably advertises "no thumbnail" per the Android
// ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES contract — confirm.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// Sensor test pattern modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Supported high-frame-rate fps values -> backend HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
302
// camera3_device_ops vtable handed to the camera framework via
// mCameraDevice.ops; entries this HAL does not implement are left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
314
// initialise to some default value
// Per-camera backend session ids, filled in by openCamera() via
// get_session_id() and used when linking dual cameras.
// NOTE(review): initializer hard-codes 3 sentinel entries — confirm this
// tracks the array size declared in the header (MM_CAMERA_MAX_NUM_SENSORS).
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
317
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes every
 *              member to its idle default, fills in the camera3_device_t
 *              handed back to the framework, reads debug/tuning properties,
 *              and queries the GPU library for surface stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks (stored, not invoked here)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Populate the camera3_device_t the framework will use to reach this HAL.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Falls back to CAM_PAD_TO_32 if the library or symbol is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
445
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown order
 *              matters: stop every channel first, then delete channels,
 *              then unconfigure/deinit backend parameters, then close the
 *              camera and release remaining per-request state.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // All channels stopped above; now it is safe to delete them and free
    // the per-stream bookkeeping entries.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo and was deleted above;
    // just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any requests/buffers still tracked; erasePendingRequest() also
    // frees the per-request heap allocations.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
577
578/*===========================================================================
579 * FUNCTION   : erasePendingRequest
580 *
581 * DESCRIPTION: function to erase a desired pending request after freeing any
582 *              allocated memory
583 *
584 * PARAMETERS :
585 *   @i       : iterator pointing to pending request to be erased
586 *
587 * RETURN     : iterator pointing to the next request
588 *==========================================================================*/
589QCamera3HardwareInterface::pendingRequestIterator
590        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
591{
592    if (i->input_buffer != NULL) {
593        free(i->input_buffer);
594        i->input_buffer = NULL;
595    }
596    if (i->settings != NULL)
597        free_camera_metadata((camera_metadata_t*)i->settings);
598    return mPendingRequestsList.erase(i);
599}
600
601/*===========================================================================
602 * FUNCTION   : camEvtHandle
603 *
604 * DESCRIPTION: Function registered to mm-camera-interface to handle events
605 *
606 * PARAMETERS :
607 *   @camera_handle : interface layer camera handle
608 *   @evt           : ptr to event
609 *   @user_data     : user data ptr
610 *
611 * RETURN     : none
612 *==========================================================================*/
613void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
614                                          mm_camera_event_t *evt,
615                                          void *user_data)
616{
617    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
618    if (obj && evt) {
619        switch(evt->server_event_type) {
620            case CAM_EVENT_TYPE_DAEMON_DIED:
621                pthread_mutex_lock(&obj->mMutex);
622                obj->mState = ERROR;
623                pthread_mutex_unlock(&obj->mMutex);
624                LOGE("Fatal, camera daemon died");
625                break;
626
627            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
628                LOGD("HAL got request pull from Daemon");
629                pthread_mutex_lock(&obj->mMutex);
630                obj->mWokenUpByDaemon = true;
631                obj->unblockRequestIfNecessary();
632                pthread_mutex_unlock(&obj->mMutex);
633                break;
634
635            default:
636                LOGW("Warning: Unhandled event %d",
637                        evt->server_event_type);
638                break;
639        }
640    } else {
641        LOGE("NULL user_data/evt");
642    }
643}
644
645/*===========================================================================
646 * FUNCTION   : openCamera
647 *
648 * DESCRIPTION: open camera
649 *
650 * PARAMETERS :
651 *   @hw_device  : double ptr for camera device struct
652 *
653 * RETURN     : int32_t type of status
654 *              NO_ERROR  -- success
655 *              none-zero failure code
656 *==========================================================================*/
657int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
658{
659    int rc = 0;
660    if (mState != CLOSED) {
661        *hw_device = NULL;
662        return PERMISSION_DENIED;
663    }
664
665    m_perfLock.lock_acq();
666    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
667             mCameraId);
668
669    rc = openCamera();
670    if (rc == 0) {
671        *hw_device = &mCameraDevice.common;
672    } else
673        *hw_device = NULL;
674
675    m_perfLock.lock_rel();
676    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
677             mCameraId, rc);
678
679    if (rc == NO_ERROR) {
680        mState = OPENED;
681    }
682    return rc;
683}
684
685/*===========================================================================
686 * FUNCTION   : openCamera
687 *
688 * DESCRIPTION: open camera
689 *
690 * PARAMETERS : none
691 *
692 * RETURN     : int32_t type of status
693 *              NO_ERROR  -- success
694 *              none-zero failure code
695 *==========================================================================*/
696int QCamera3HardwareInterface::openCamera()
697{
698    int rc = 0;
699    char value[PROPERTY_VALUE_MAX];
700
701    KPI_ATRACE_CALL();
702    if (mCameraHandle) {
703        LOGE("Failure: Camera already opened");
704        return ALREADY_EXISTS;
705    }
706
707    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
708    if (rc < 0) {
709        LOGE("Failed to reserve flash for camera id: %d",
710                mCameraId);
711        return UNKNOWN_ERROR;
712    }
713
714    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
715    if (rc) {
716        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
717        return rc;
718    }
719
720    if (!mCameraHandle) {
721        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
722        return -ENODEV;
723    }
724
725    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
726            camEvtHandle, (void *)this);
727
728    if (rc < 0) {
729        LOGE("Error, failed to register event callback");
730        /* Not closing camera here since it is already handled in destructor */
731        return FAILED_TRANSACTION;
732    }
733
734    mExifParams.debug_params =
735            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
736    if (mExifParams.debug_params) {
737        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
738    } else {
739        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
740        return NO_MEMORY;
741    }
742    mFirstConfiguration = true;
743
744    //Notify display HAL that a camera session is active.
745    //But avoid calling the same during bootup because camera service might open/close
746    //cameras at boot time during its initialization and display service will also internally
747    //wait for camera service to initialize first while calling this display API, resulting in a
748    //deadlock situation. Since boot time camera open/close calls are made only to fetch
749    //capabilities, no need of this display bw optimization.
750    //Use "service.bootanim.exit" property to know boot status.
751    property_get("service.bootanim.exit", value, "0");
752    if (atoi(value) == 1) {
753        pthread_mutex_lock(&gCamLock);
754        if (gNumCameraSessions++ == 0) {
755            setCameraLaunchStatus(true);
756        }
757        pthread_mutex_unlock(&gCamLock);
758    }
759
760    //fill the session id needed while linking dual cam
761    pthread_mutex_lock(&gCamLock);
762    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
763        &sessionId[mCameraId]);
764    pthread_mutex_unlock(&gCamLock);
765
766    if (rc < 0) {
767        LOGE("Error, failed to get sessiion id");
768        return UNKNOWN_ERROR;
769    } else {
770        //Allocate related cam sync buffer
771        //this is needed for the payload that goes along with bundling cmd for related
772        //camera use cases
773        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
774        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
775        if(rc != OK) {
776            rc = NO_MEMORY;
777            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
778            return NO_MEMORY;
779        }
780
781        //Map memory for related cam sync buffer
782        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
783                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
784                m_pRelCamSyncHeap->getFd(0),
785                sizeof(cam_sync_related_sensors_event_info_t),
786                m_pRelCamSyncHeap->getPtr(0));
787        if(rc < 0) {
788            LOGE("Dualcam: failed to map Related cam sync buffer");
789            rc = FAILED_TRANSACTION;
790            return NO_MEMORY;
791        }
792        m_pRelCamSyncBuf =
793                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
794    }
795
796    LOGH("mCameraId=%d",mCameraId);
797
798    return NO_ERROR;
799}
800
801/*===========================================================================
802 * FUNCTION   : closeCamera
803 *
804 * DESCRIPTION: close camera
805 *
806 * PARAMETERS : none
807 *
808 * RETURN     : int32_t type of status
809 *              NO_ERROR  -- success
810 *              non-zero failure code
811 *==========================================================================*/
812int QCamera3HardwareInterface::closeCamera()
813{
814    KPI_ATRACE_CALL();
815    int rc = NO_ERROR;
816    char value[PROPERTY_VALUE_MAX];
817
818    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
819             mCameraId);
820    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
821    mCameraHandle = NULL;
822
823    //reset session id to some invalid id
824    pthread_mutex_lock(&gCamLock);
825    sessionId[mCameraId] = 0xDEADBEEF;
826    pthread_mutex_unlock(&gCamLock);
827
828    //Notify display HAL that there is no active camera session
829    //but avoid calling the same during bootup. Refer to openCamera
830    //for more details.
831    property_get("service.bootanim.exit", value, "0");
832    if (atoi(value) == 1) {
833        pthread_mutex_lock(&gCamLock);
834        if (--gNumCameraSessions == 0) {
835            setCameraLaunchStatus(false);
836        }
837        pthread_mutex_unlock(&gCamLock);
838    }
839
840    if (NULL != m_pRelCamSyncHeap) {
841        m_pRelCamSyncHeap->deallocate();
842        delete m_pRelCamSyncHeap;
843        m_pRelCamSyncHeap = NULL;
844        m_pRelCamSyncBuf = NULL;
845    }
846
847    if (mExifParams.debug_params) {
848        free(mExifParams.debug_params);
849        mExifParams.debug_params = NULL;
850    }
851    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
852        LOGW("Failed to release flash for camera id: %d",
853                mCameraId);
854    }
855    mState = CLOSED;
856    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
857         mCameraId, rc);
858    return rc;
859}
860
861/*===========================================================================
862 * FUNCTION   : initialize
863 *
864 * DESCRIPTION: Initialize frameworks callback functions
865 *
866 * PARAMETERS :
867 *   @callback_ops : callback function to frameworks
868 *
869 * RETURN     :
870 *
871 *==========================================================================*/
872int QCamera3HardwareInterface::initialize(
873        const struct camera3_callback_ops *callback_ops)
874{
875    ATRACE_CALL();
876    int rc;
877
878    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
879    pthread_mutex_lock(&mMutex);
880
881    // Validate current state
882    switch (mState) {
883        case OPENED:
884            /* valid state */
885            break;
886        default:
887            LOGE("Invalid state %d", mState);
888            rc = -ENODEV;
889            goto err1;
890    }
891
892    rc = initParameters();
893    if (rc < 0) {
894        LOGE("initParamters failed %d", rc);
895        goto err1;
896    }
897    mCallbackOps = callback_ops;
898
899    mChannelHandle = mCameraHandle->ops->add_channel(
900            mCameraHandle->camera_handle, NULL, NULL, this);
901    if (mChannelHandle == 0) {
902        LOGE("add_channel failed");
903        rc = -ENOMEM;
904        pthread_mutex_unlock(&mMutex);
905        return rc;
906    }
907
908    pthread_mutex_unlock(&mMutex);
909    mCameraInitialized = true;
910    mState = INITIALIZED;
911    LOGI("X");
912    return 0;
913
914err1:
915    pthread_mutex_unlock(&mMutex);
916    return rc;
917}
918
919/*===========================================================================
920 * FUNCTION   : validateStreamDimensions
921 *
922 * DESCRIPTION: Check if the configuration requested are those advertised
923 *
924 * PARAMETERS :
925 *   @stream_list : streams to be configured
926 *
927 * RETURN     :
928 *
929 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find the input stream if it exists.
    * At most one input stream is allowed per configuration.
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270 degree rotation, compare the capability tables against
        // the swapped (pre-rotation) dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must exactly match one of the advertised RAW dims.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG streams are validated against the picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams are additionally allowed to
            // match the full active-array size directly.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Otherwise processed streams fall back to the picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1041
1042/*==============================================================================
1043 * FUNCTION   : isSupportChannelNeeded
1044 *
1045 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1046 *
1047 * PARAMETERS :
1048 *   @stream_list : streams to be configured
1049 *   @stream_config_info : the config info for streams to be configured
1050 *
1051 * RETURN     : Boolean true/false decision
1052 *
1053 *==========================================================================*/
1054bool QCamera3HardwareInterface::isSupportChannelNeeded(
1055        camera3_stream_configuration_t *streamList,
1056        cam_stream_size_info_t stream_config_info)
1057{
1058    uint32_t i;
1059    bool pprocRequested = false;
1060    /* Check for conditions where PProc pipeline does not have any streams*/
1061    for (i = 0; i < stream_config_info.num_streams; i++) {
1062        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1063                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1064            pprocRequested = true;
1065            break;
1066        }
1067    }
1068
1069    if (pprocRequested == false )
1070        return true;
1071
1072    /* Dummy stream needed if only raw or jpeg streams present */
1073    for (i = 0; i < streamList->num_streams; i++) {
1074        switch(streamList->streams[i]->format) {
1075            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1076            case HAL_PIXEL_FORMAT_RAW10:
1077            case HAL_PIXEL_FORMAT_RAW16:
1078            case HAL_PIXEL_FORMAT_BLOB:
1079                break;
1080            default:
1081                return false;
1082        }
1083    }
1084    return true;
1085}
1086
1087/*==============================================================================
1088 * FUNCTION   : getSensorOutputSize
1089 *
1090 * DESCRIPTION: Get sensor output size based on current stream configuration
1091 *
1092 * PARAMETERS :
1093 *   @sensor_dim : sensor output dimension (output)
1094 *
1095 * RETURN     : int32_t type of status
1096 *              NO_ERROR  -- success
1097 *              non-zero failure code
1098 *
1099 *==========================================================================*/
1100int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1101{
1102    int32_t rc = NO_ERROR;
1103
1104    cam_dimension_t max_dim = {0, 0};
1105    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1106        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1107            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1108        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1109            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1110    }
1111
1112    clear_metadata_buffer(mParameters);
1113
1114    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1115            max_dim);
1116    if (rc != NO_ERROR) {
1117        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1118        return rc;
1119    }
1120
1121    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1122    if (rc != NO_ERROR) {
1123        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1124        return rc;
1125    }
1126
1127    clear_metadata_buffer(mParameters);
1128    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1129
1130    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1131            mParameters);
1132    if (rc != NO_ERROR) {
1133        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1134        return rc;
1135    }
1136
1137    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1138    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1139
1140    return rc;
1141}
1142
1143/*==============================================================================
1144 * FUNCTION   : enablePowerHint
1145 *
1146 * DESCRIPTION: enable single powerhint for preview and different video modes.
1147 *
1148 * PARAMETERS :
1149 *
1150 * RETURN     : NULL
1151 *
1152 *==========================================================================*/
1153void QCamera3HardwareInterface::enablePowerHint()
1154{
1155    if (!mPowerHintEnabled) {
1156        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1157        mPowerHintEnabled = true;
1158    }
1159}
1160
1161/*==============================================================================
1162 * FUNCTION   : disablePowerHint
1163 *
1164 * DESCRIPTION: disable current powerhint.
1165 *
1166 * PARAMETERS :
1167 *
1168 * RETURN     : NULL
1169 *
1170 *==========================================================================*/
1171void QCamera3HardwareInterface::disablePowerHint()
1172{
1173    if (mPowerHintEnabled) {
1174        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1175        mPowerHintEnabled = false;
1176    }
1177}
1178
1179/*==============================================================================
1180 * FUNCTION   : addToPPFeatureMask
1181 *
1182 * DESCRIPTION: add additional features to pp feature mask based on
1183 *              stream type and usecase
1184 *
1185 * PARAMETERS :
1186 *   @stream_format : stream type for feature mask
1187 *   @stream_idx : stream idx within postprocess_mask list to change
1188 *
1189 * RETURN     : NULL
1190 *
1191 *==========================================================================*/
1192void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1193        uint32_t stream_idx)
1194{
1195    char feature_mask_value[PROPERTY_VALUE_MAX];
1196    cam_feature_mask_t feature_mask;
1197    int args_converted;
1198    int property_len;
1199
1200    /* Get feature mask from property */
1201    property_len = property_get("persist.camera.hal3.feature",
1202            feature_mask_value, "0");
1203    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1204            (feature_mask_value[1] == 'x')) {
1205        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1206    } else {
1207        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1208    }
1209    if (1 != args_converted) {
1210        feature_mask = 0;
1211        LOGE("Wrong feature mask %s", feature_mask_value);
1212        return;
1213    }
1214
1215    switch (stream_format) {
1216    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1217        /* Add LLVD to pp feature mask only if video hint is enabled */
1218        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1219            mStreamConfigInfo.postprocess_mask[stream_idx]
1220                    |= CAM_QTI_FEATURE_SW_TNR;
1221            LOGH("Added SW TNR to pp feature mask");
1222        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1223            mStreamConfigInfo.postprocess_mask[stream_idx]
1224                    |= CAM_QCOM_FEATURE_LLVD;
1225            LOGH("Added LLVD SeeMore to pp feature mask");
1226        }
1227        break;
1228    }
1229    default:
1230        break;
1231    }
1232    LOGD("PP feature mask %llx",
1233            mStreamConfigInfo.postprocess_mask[stream_idx]);
1234}
1235
1236/*==============================================================================
1237 * FUNCTION   : updateFpsInPreviewBuffer
1238 *
1239 * DESCRIPTION: update FPS information in preview buffer.
1240 *
1241 * PARAMETERS :
1242 *   @metadata    : pointer to metadata buffer
1243 *   @frame_number: frame_number to look for in pending buffer list
1244 *
1245 * RETURN     : None
1246 *
1247 *==========================================================================*/
1248void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1249        uint32_t frame_number)
1250{
1251    // Mark all pending buffers for this particular request
1252    // with corresponding framerate information
1253    for (List<PendingBuffersInRequest>::iterator req =
1254            mPendingBuffersMap.mPendingBuffersInRequest.begin();
1255            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1256        for(List<PendingBufferInfo>::iterator j =
1257                req->mPendingBufferList.begin();
1258                j != req->mPendingBufferList.end(); j++) {
1259            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1260            if ((req->frame_number == frame_number) &&
1261                (channel->getStreamTypeMask() &
1262                (1U << CAM_STREAM_TYPE_PREVIEW))) {
1263                IF_META_AVAILABLE(cam_fps_range_t, float_range,
1264                    CAM_INTF_PARM_FPS_RANGE, metadata) {
1265                    int32_t cameraFps = float_range->max_fps;
1266                    struct private_handle_t *priv_handle =
1267                        (struct private_handle_t *)(*(j->buffer));
1268                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1269                }
1270            }
1271        }
1272    }
1273}
1274
1275/*===========================================================================
1276 * FUNCTION   : configureStreams
1277 *
1278 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1279 *              and output streams.
1280 *
1281 * PARAMETERS :
1282 *   @stream_list : streams to be configured
1283 *
1284 * RETURN     :
1285 *
1286 *==========================================================================*/
1287int QCamera3HardwareInterface::configureStreams(
1288        camera3_stream_configuration_t *streamList)
1289{
1290    ATRACE_CALL();
1291    int rc = 0;
1292
1293    // Acquire perfLock before configure streams
1294    m_perfLock.lock_acq();
1295    rc = configureStreamsPerfLocked(streamList);
1296    m_perfLock.lock_rel();
1297
1298    return rc;
1299}
1300
1301/*===========================================================================
1302 * FUNCTION   : configureStreamsPerfLocked
1303 *
1304 * DESCRIPTION: configureStreams while perfLock is held.
1305 *
1306 * PARAMETERS :
1307 *   @stream_list : streams to be configured
1308 *
1309 * RETURN     : int32_t type of status
1310 *              NO_ERROR  -- success
1311 *              non-zero failure code
1312 *==========================================================================*/
1313int QCamera3HardwareInterface::configureStreamsPerfLocked(
1314        camera3_stream_configuration_t *streamList)
1315{
1316    ATRACE_CALL();
1317    int rc = 0;
1318
1319    // Sanity check stream_list
1320    if (streamList == NULL) {
1321        LOGE("NULL stream configuration");
1322        return BAD_VALUE;
1323    }
1324    if (streamList->streams == NULL) {
1325        LOGE("NULL stream list");
1326        return BAD_VALUE;
1327    }
1328
1329    if (streamList->num_streams < 1) {
1330        LOGE("Bad number of streams requested: %d",
1331                streamList->num_streams);
1332        return BAD_VALUE;
1333    }
1334
1335    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1336        LOGE("Maximum number of streams %d exceeded: %d",
1337                MAX_NUM_STREAMS, streamList->num_streams);
1338        return BAD_VALUE;
1339    }
1340
1341    mOpMode = streamList->operation_mode;
1342    LOGD("mOpMode: %d", mOpMode);
1343
1344    /* first invalidate all the steams in the mStreamList
1345     * if they appear again, they will be validated */
1346    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1347            it != mStreamInfo.end(); it++) {
1348        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1349        if (channel) {
1350          channel->stop();
1351        }
1352        (*it)->status = INVALID;
1353    }
1354
1355    if (mRawDumpChannel) {
1356        mRawDumpChannel->stop();
1357        delete mRawDumpChannel;
1358        mRawDumpChannel = NULL;
1359    }
1360
1361    if (mSupportChannel)
1362        mSupportChannel->stop();
1363
1364    if (mAnalysisChannel) {
1365        mAnalysisChannel->stop();
1366    }
1367    if (mMetadataChannel) {
1368        /* If content of mStreamInfo is not 0, there is metadata stream */
1369        mMetadataChannel->stop();
1370    }
1371    if (mChannelHandle) {
1372        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1373                mChannelHandle);
1374        LOGD("stopping channel %d", mChannelHandle);
1375    }
1376
1377    pthread_mutex_lock(&mMutex);
1378
1379    // Check state
1380    switch (mState) {
1381        case INITIALIZED:
1382        case CONFIGURED:
1383        case STARTED:
1384            /* valid state */
1385            break;
1386        default:
1387            LOGE("Invalid state %d", mState);
1388            pthread_mutex_unlock(&mMutex);
1389            return -ENODEV;
1390    }
1391
1392    /* Check whether we have video stream */
1393    m_bIs4KVideo = false;
1394    m_bIsVideo = false;
1395    m_bEisSupportedSize = false;
1396    m_bTnrEnabled = false;
1397    bool isZsl = false;
1398    uint32_t videoWidth = 0U;
1399    uint32_t videoHeight = 0U;
1400    size_t rawStreamCnt = 0;
1401    size_t stallStreamCnt = 0;
1402    size_t processedStreamCnt = 0;
1403    // Number of streams on ISP encoder path
1404    size_t numStreamsOnEncoder = 0;
1405    size_t numYuv888OnEncoder = 0;
1406    bool bYuv888OverrideJpeg = false;
1407    cam_dimension_t largeYuv888Size = {0, 0};
1408    cam_dimension_t maxViewfinderSize = {0, 0};
1409    bool bJpegExceeds4K = false;
1410    bool bJpegOnEncoder = false;
1411    bool bUseCommonFeatureMask = false;
1412    cam_feature_mask_t commonFeatureMask = 0;
1413    bool bSmallJpegSize = false;
1414    uint32_t width_ratio;
1415    uint32_t height_ratio;
1416    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1417    camera3_stream_t *inputStream = NULL;
1418    bool isJpeg = false;
1419    cam_dimension_t jpegSize = {0, 0};
1420
1421    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1422
1423    /*EIS configuration*/
1424    bool eisSupported = false;
1425    bool oisSupported = false;
1426    int32_t margin_index = -1;
1427    uint8_t eis_prop_set;
1428    uint32_t maxEisWidth = 0;
1429    uint32_t maxEisHeight = 0;
1430
1431    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1432
1433    size_t count = IS_TYPE_MAX;
1434    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1435    for (size_t i = 0; i < count; i++) {
1436        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1437            eisSupported = true;
1438            margin_index = (int32_t)i;
1439            break;
1440        }
1441    }
1442
1443    count = CAM_OPT_STAB_MAX;
1444    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1445    for (size_t i = 0; i < count; i++) {
1446        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1447            oisSupported = true;
1448            break;
1449        }
1450    }
1451
1452    if (eisSupported) {
1453        maxEisWidth = MAX_EIS_WIDTH;
1454        maxEisHeight = MAX_EIS_HEIGHT;
1455    }
1456
1457    /* EIS setprop control */
1458    char eis_prop[PROPERTY_VALUE_MAX];
1459    memset(eis_prop, 0, sizeof(eis_prop));
1460    property_get("persist.camera.eis.enable", eis_prop, "0");
1461    eis_prop_set = (uint8_t)atoi(eis_prop);
1462
1463    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1464            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1465
1466    /* stream configurations */
1467    for (size_t i = 0; i < streamList->num_streams; i++) {
1468        camera3_stream_t *newStream = streamList->streams[i];
1469        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1470                "height = %d, rotation = %d, usage = 0x%x",
1471                 i, newStream->stream_type, newStream->format,
1472                newStream->width, newStream->height, newStream->rotation,
1473                newStream->usage);
1474        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1475                newStream->stream_type == CAMERA3_STREAM_INPUT){
1476            isZsl = true;
1477        }
1478        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1479            inputStream = newStream;
1480        }
1481
1482        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1483            isJpeg = true;
1484            jpegSize.width = newStream->width;
1485            jpegSize.height = newStream->height;
1486            if (newStream->width > VIDEO_4K_WIDTH ||
1487                    newStream->height > VIDEO_4K_HEIGHT)
1488                bJpegExceeds4K = true;
1489        }
1490
1491        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1492                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1493            m_bIsVideo = true;
1494            videoWidth = newStream->width;
1495            videoHeight = newStream->height;
1496            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1497                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1498                m_bIs4KVideo = true;
1499            }
1500            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1501                                  (newStream->height <= maxEisHeight);
1502        }
1503        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1504                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1505            switch (newStream->format) {
1506            case HAL_PIXEL_FORMAT_BLOB:
1507                stallStreamCnt++;
1508                if (isOnEncoder(maxViewfinderSize, newStream->width,
1509                        newStream->height)) {
1510                    numStreamsOnEncoder++;
1511                    bJpegOnEncoder = true;
1512                }
1513                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1514                        newStream->width);
1515                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1516                        newStream->height);;
1517                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1518                        "FATAL: max_downscale_factor cannot be zero and so assert");
1519                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1520                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1521                    LOGH("Setting small jpeg size flag to true");
1522                    bSmallJpegSize = true;
1523                }
1524                break;
1525            case HAL_PIXEL_FORMAT_RAW10:
1526            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1527            case HAL_PIXEL_FORMAT_RAW16:
1528                rawStreamCnt++;
1529                break;
1530            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1531                processedStreamCnt++;
1532                if (isOnEncoder(maxViewfinderSize, newStream->width,
1533                        newStream->height)) {
1534                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1535                            !IS_USAGE_ZSL(newStream->usage)) {
1536                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1537                    }
1538                    numStreamsOnEncoder++;
1539                }
1540                break;
1541            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1542                processedStreamCnt++;
1543                if (isOnEncoder(maxViewfinderSize, newStream->width,
1544                        newStream->height)) {
1545                    // If Yuv888 size is not greater than 4K, set feature mask
1546                    // to SUPERSET so that it support concurrent request on
1547                    // YUV and JPEG.
1548                    if (newStream->width <= VIDEO_4K_WIDTH &&
1549                            newStream->height <= VIDEO_4K_HEIGHT) {
1550                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1551                    }
1552                    numStreamsOnEncoder++;
1553                    numYuv888OnEncoder++;
1554                    largeYuv888Size.width = newStream->width;
1555                    largeYuv888Size.height = newStream->height;
1556                }
1557                break;
1558            default:
1559                processedStreamCnt++;
1560                if (isOnEncoder(maxViewfinderSize, newStream->width,
1561                        newStream->height)) {
1562                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1563                    numStreamsOnEncoder++;
1564                }
1565                break;
1566            }
1567
1568        }
1569    }
1570
1571    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1572        !m_bIsVideo) {
1573        m_bEisEnable = false;
1574    }
1575
1576    /* Logic to enable/disable TNR based on specific config size/etc.*/
1577    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1578            ((videoWidth == 1920 && videoHeight == 1080) ||
1579            (videoWidth == 1280 && videoHeight == 720)) &&
1580            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1581        m_bTnrEnabled = true;
1582
1583    /* Check if num_streams is sane */
1584    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1585            rawStreamCnt > MAX_RAW_STREAMS ||
1586            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1587        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1588                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1589        pthread_mutex_unlock(&mMutex);
1590        return -EINVAL;
1591    }
1592    /* Check whether we have zsl stream or 4k video case */
1593    if (isZsl && m_bIsVideo) {
1594        LOGE("Currently invalid configuration ZSL&Video!");
1595        pthread_mutex_unlock(&mMutex);
1596        return -EINVAL;
1597    }
1598    /* Check if stream sizes are sane */
1599    if (numStreamsOnEncoder > 2) {
1600        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1601        pthread_mutex_unlock(&mMutex);
1602        return -EINVAL;
1603    } else if (1 < numStreamsOnEncoder){
1604        bUseCommonFeatureMask = true;
1605        LOGH("Multiple streams above max viewfinder size, common mask needed");
1606    }
1607
1608    /* Check if BLOB size is greater than 4k in 4k recording case */
1609    if (m_bIs4KVideo && bJpegExceeds4K) {
1610        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1611        pthread_mutex_unlock(&mMutex);
1612        return -EINVAL;
1613    }
1614
1615    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1616    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1617    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1618    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1619    // configurations:
1620    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1621    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1622    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1623    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1624        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1625                __func__);
1626        pthread_mutex_unlock(&mMutex);
1627        return -EINVAL;
1628    }
1629
1630    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1631    // the YUV stream's size is greater or equal to the JPEG size, set common
1632    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1633    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1634            jpegSize.width, jpegSize.height) &&
1635            largeYuv888Size.width > jpegSize.width &&
1636            largeYuv888Size.height > jpegSize.height) {
1637        bYuv888OverrideJpeg = true;
1638    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1639        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1640    }
1641
1642    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1643            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1644            commonFeatureMask);
1645    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1646            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1647
1648    rc = validateStreamDimensions(streamList);
1649    if (rc == NO_ERROR) {
1650        rc = validateStreamRotations(streamList);
1651    }
1652    if (rc != NO_ERROR) {
1653        LOGE("Invalid stream configuration requested!");
1654        pthread_mutex_unlock(&mMutex);
1655        return rc;
1656    }
1657
1658    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1659    camera3_stream_t *jpegStream = NULL;
1660    for (size_t i = 0; i < streamList->num_streams; i++) {
1661        camera3_stream_t *newStream = streamList->streams[i];
1662        LOGH("newStream type = %d, stream format = %d "
1663                "stream size : %d x %d, stream rotation = %d",
1664                 newStream->stream_type, newStream->format,
1665                newStream->width, newStream->height, newStream->rotation);
1666        //if the stream is in the mStreamList validate it
1667        bool stream_exists = false;
1668        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1669                it != mStreamInfo.end(); it++) {
1670            if ((*it)->stream == newStream) {
1671                QCamera3ProcessingChannel *channel =
1672                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1673                stream_exists = true;
1674                if (channel)
1675                    delete channel;
1676                (*it)->status = VALID;
1677                (*it)->stream->priv = NULL;
1678                (*it)->channel = NULL;
1679            }
1680        }
1681        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1682            //new stream
1683            stream_info_t* stream_info;
1684            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1685            if (!stream_info) {
1686               LOGE("Could not allocate stream info");
1687               rc = -ENOMEM;
1688               pthread_mutex_unlock(&mMutex);
1689               return rc;
1690            }
1691            stream_info->stream = newStream;
1692            stream_info->status = VALID;
1693            stream_info->channel = NULL;
1694            mStreamInfo.push_back(stream_info);
1695        }
1696        /* Covers Opaque ZSL and API1 F/W ZSL */
1697        if (IS_USAGE_ZSL(newStream->usage)
1698                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1699            if (zslStream != NULL) {
1700                LOGE("Multiple input/reprocess streams requested!");
1701                pthread_mutex_unlock(&mMutex);
1702                return BAD_VALUE;
1703            }
1704            zslStream = newStream;
1705        }
1706        /* Covers YUV reprocess */
1707        if (inputStream != NULL) {
1708            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1709                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1710                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1711                    && inputStream->width == newStream->width
1712                    && inputStream->height == newStream->height) {
1713                if (zslStream != NULL) {
1714                    /* This scenario indicates multiple YUV streams with same size
1715                     * as input stream have been requested, since zsl stream handle
1716                     * is solely use for the purpose of overriding the size of streams
1717                     * which share h/w streams we will just make a guess here as to
1718                     * which of the stream is a ZSL stream, this will be refactored
1719                     * once we make generic logic for streams sharing encoder output
1720                     */
1721                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1722                }
1723                zslStream = newStream;
1724            }
1725        }
1726        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1727            jpegStream = newStream;
1728        }
1729    }
1730
1731    /* If a zsl stream is set, we know that we have configured at least one input or
1732       bidirectional stream */
1733    if (NULL != zslStream) {
1734        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1735        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1736        mInputStreamInfo.format = zslStream->format;
1737        mInputStreamInfo.usage = zslStream->usage;
1738        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1739                 mInputStreamInfo.dim.width,
1740                mInputStreamInfo.dim.height,
1741                mInputStreamInfo.format, mInputStreamInfo.usage);
1742    }
1743
1744    cleanAndSortStreamInfo();
1745    if (mMetadataChannel) {
1746        delete mMetadataChannel;
1747        mMetadataChannel = NULL;
1748    }
1749    if (mSupportChannel) {
1750        delete mSupportChannel;
1751        mSupportChannel = NULL;
1752    }
1753
1754    if (mAnalysisChannel) {
1755        delete mAnalysisChannel;
1756        mAnalysisChannel = NULL;
1757    }
1758
1759    if (mDummyBatchChannel) {
1760        delete mDummyBatchChannel;
1761        mDummyBatchChannel = NULL;
1762    }
1763
1764    //Create metadata channel and initialize it
1765    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1766    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1767            gCamCapability[mCameraId]->color_arrangement);
1768    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1769                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1770                    &padding_info, metadataFeatureMask, this);
1771    if (mMetadataChannel == NULL) {
1772        LOGE("failed to allocate metadata channel");
1773        rc = -ENOMEM;
1774        pthread_mutex_unlock(&mMutex);
1775        return rc;
1776    }
1777    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1778    if (rc < 0) {
1779        LOGE("metadata channel initialization failed");
1780        delete mMetadataChannel;
1781        mMetadataChannel = NULL;
1782        pthread_mutex_unlock(&mMutex);
1783        return rc;
1784    }
1785
1786    // Create analysis stream all the time, even when h/w support is not available
1787    {
1788        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1789        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1790                gCamCapability[mCameraId]->color_arrangement);
1791        cam_analysis_info_t analysisInfo;
1792        rc = mCommon.getAnalysisInfo(
1793                FALSE,
1794                TRUE,
1795                analysisFeatureMask,
1796                &analysisInfo);
1797        if (rc != NO_ERROR) {
1798            LOGE("getAnalysisInfo failed, ret = %d", rc);
1799            pthread_mutex_unlock(&mMutex);
1800            return rc;
1801        }
1802
1803        mAnalysisChannel = new QCamera3SupportChannel(
1804                mCameraHandle->camera_handle,
1805                mChannelHandle,
1806                mCameraHandle->ops,
1807                &analysisInfo.analysis_padding_info,
1808                analysisFeatureMask,
1809                CAM_STREAM_TYPE_ANALYSIS,
1810                &analysisInfo.analysis_max_res,
1811                (analysisInfo.analysis_format
1812                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1813                : CAM_FORMAT_YUV_420_NV21),
1814                analysisInfo.hw_analysis_supported,
1815                this,
1816                0); // force buffer count to 0
1817        if (!mAnalysisChannel) {
1818            LOGE("H/W Analysis channel cannot be created");
1819            pthread_mutex_unlock(&mMutex);
1820            return -ENOMEM;
1821        }
1822    }
1823
1824    bool isRawStreamRequested = false;
1825    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1826    /* Allocate channel objects for the requested streams */
1827    for (size_t i = 0; i < streamList->num_streams; i++) {
1828        camera3_stream_t *newStream = streamList->streams[i];
1829        uint32_t stream_usage = newStream->usage;
1830        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1831        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1832        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1833                || IS_USAGE_ZSL(newStream->usage)) &&
1834            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1835            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1836            if (bUseCommonFeatureMask) {
1837                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1838                        commonFeatureMask;
1839            } else {
1840                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1841                        CAM_QCOM_FEATURE_NONE;
1842            }
1843
1844        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1845                LOGH("Input stream configured, reprocess config");
1846        } else {
1847            //for non zsl streams find out the format
1848            switch (newStream->format) {
1849            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1850            {
1851                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1852                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1853                /* add additional features to pp feature mask */
1854                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1855                        mStreamConfigInfo.num_streams);
1856
1857                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1858                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1859                                CAM_STREAM_TYPE_VIDEO;
1860                    if (m_bTnrEnabled && m_bTnrVideo) {
1861                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1862                            CAM_QCOM_FEATURE_CPP_TNR;
1863                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1864                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1865                                ~CAM_QCOM_FEATURE_CDS;
1866                    }
1867                } else {
1868                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1869                            CAM_STREAM_TYPE_PREVIEW;
1870                    if (m_bTnrEnabled && m_bTnrPreview) {
1871                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1872                                CAM_QCOM_FEATURE_CPP_TNR;
1873                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1874                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1875                                ~CAM_QCOM_FEATURE_CDS;
1876                    }
1877                    padding_info.width_padding = mSurfaceStridePadding;
1878                    padding_info.height_padding = CAM_PAD_TO_2;
1879                }
1880                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1881                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1882                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1883                            newStream->height;
1884                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1885                            newStream->width;
1886                }
1887            }
1888            break;
1889            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1890                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1891                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1892                    if (bUseCommonFeatureMask)
1893                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1894                                commonFeatureMask;
1895                    else
1896                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1897                                CAM_QCOM_FEATURE_NONE;
1898                } else {
1899                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1900                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1901                }
1902            break;
1903            case HAL_PIXEL_FORMAT_BLOB:
1904                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1905                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1906                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1907                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1908                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1909                } else {
1910                    if (bUseCommonFeatureMask &&
1911                            isOnEncoder(maxViewfinderSize, newStream->width,
1912                            newStream->height)) {
1913                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1914                    } else {
1915                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1916                    }
1917                }
1918                if (isZsl) {
1919                    if (zslStream) {
1920                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1921                                (int32_t)zslStream->width;
1922                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1923                                (int32_t)zslStream->height;
1924                    } else {
1925                        LOGE("Error, No ZSL stream identified");
1926                        pthread_mutex_unlock(&mMutex);
1927                        return -EINVAL;
1928                    }
1929                } else if (m_bIs4KVideo) {
1930                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1931                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1932                } else if (bYuv888OverrideJpeg) {
1933                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1934                            (int32_t)largeYuv888Size.width;
1935                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1936                            (int32_t)largeYuv888Size.height;
1937                }
1938                break;
1939            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1940            case HAL_PIXEL_FORMAT_RAW16:
1941            case HAL_PIXEL_FORMAT_RAW10:
1942                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1943                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1944                isRawStreamRequested = true;
1945                break;
1946            default:
1947                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1948                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1949                break;
1950            }
1951        }
1952
1953        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1954                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1955                gCamCapability[mCameraId]->color_arrangement);
1956
1957        if (newStream->priv == NULL) {
1958            //New stream, construct channel
1959            switch (newStream->stream_type) {
1960            case CAMERA3_STREAM_INPUT:
1961                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1962                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1963                break;
1964            case CAMERA3_STREAM_BIDIRECTIONAL:
1965                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1966                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1967                break;
1968            case CAMERA3_STREAM_OUTPUT:
1969                /* For video encoding stream, set read/write rarely
1970                 * flag so that they may be set to un-cached */
1971                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1972                    newStream->usage |=
1973                         (GRALLOC_USAGE_SW_READ_RARELY |
1974                         GRALLOC_USAGE_SW_WRITE_RARELY |
1975                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1976                else if (IS_USAGE_ZSL(newStream->usage))
1977                {
1978                    LOGD("ZSL usage flag skipping");
1979                }
1980                else if (newStream == zslStream
1981                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1982                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1983                } else
1984                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1985                break;
1986            default:
1987                LOGE("Invalid stream_type %d", newStream->stream_type);
1988                break;
1989            }
1990
1991            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1992                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1993                QCamera3ProcessingChannel *channel = NULL;
1994                switch (newStream->format) {
1995                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1996                    if ((newStream->usage &
1997                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1998                            (streamList->operation_mode ==
1999                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2000                    ) {
2001                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2002                                mChannelHandle, mCameraHandle->ops, captureResultCb,
2003                                &gCamCapability[mCameraId]->padding_info,
2004                                this,
2005                                newStream,
2006                                (cam_stream_type_t)
2007                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2008                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2009                                mMetadataChannel,
2010                                0); //heap buffers are not required for HFR video channel
2011                        if (channel == NULL) {
2012                            LOGE("allocation of channel failed");
2013                            pthread_mutex_unlock(&mMutex);
2014                            return -ENOMEM;
2015                        }
2016                        //channel->getNumBuffers() will return 0 here so use
2017                        //MAX_INFLIGH_HFR_REQUESTS
2018                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2019                        newStream->priv = channel;
2020                        LOGI("num video buffers in HFR mode: %d",
2021                                 MAX_INFLIGHT_HFR_REQUESTS);
2022                    } else {
2023                        /* Copy stream contents in HFR preview only case to create
2024                         * dummy batch channel so that sensor streaming is in
2025                         * HFR mode */
2026                        if (!m_bIsVideo && (streamList->operation_mode ==
2027                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2028                            mDummyBatchStream = *newStream;
2029                        }
2030                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2031                                mChannelHandle, mCameraHandle->ops, captureResultCb,
2032                                &gCamCapability[mCameraId]->padding_info,
2033                                this,
2034                                newStream,
2035                                (cam_stream_type_t)
2036                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2037                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2038                                mMetadataChannel,
2039                                MAX_INFLIGHT_REQUESTS);
2040                        if (channel == NULL) {
2041                            LOGE("allocation of channel failed");
2042                            pthread_mutex_unlock(&mMutex);
2043                            return -ENOMEM;
2044                        }
2045                        newStream->max_buffers = channel->getNumBuffers();
2046                        newStream->priv = channel;
2047                    }
2048                    break;
2049                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2050                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2051                            mChannelHandle,
2052                            mCameraHandle->ops, captureResultCb,
2053                            &padding_info,
2054                            this,
2055                            newStream,
2056                            (cam_stream_type_t)
2057                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2058                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2059                            mMetadataChannel);
2060                    if (channel == NULL) {
2061                        LOGE("allocation of YUV channel failed");
2062                        pthread_mutex_unlock(&mMutex);
2063                        return -ENOMEM;
2064                    }
2065                    newStream->max_buffers = channel->getNumBuffers();
2066                    newStream->priv = channel;
2067                    break;
2068                }
2069                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2070                case HAL_PIXEL_FORMAT_RAW16:
2071                case HAL_PIXEL_FORMAT_RAW10:
2072                    mRawChannel = new QCamera3RawChannel(
2073                            mCameraHandle->camera_handle, mChannelHandle,
2074                            mCameraHandle->ops, captureResultCb,
2075                            &padding_info,
2076                            this, newStream,
2077                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2078                            mMetadataChannel,
2079                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2080                    if (mRawChannel == NULL) {
2081                        LOGE("allocation of raw channel failed");
2082                        pthread_mutex_unlock(&mMutex);
2083                        return -ENOMEM;
2084                    }
2085                    newStream->max_buffers = mRawChannel->getNumBuffers();
2086                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2087                    break;
2088                case HAL_PIXEL_FORMAT_BLOB:
2089                    // Max live snapshot inflight buffer is 1. This is to mitigate
2090                    // frame drop issues for video snapshot. The more buffers being
2091                    // allocated, the more frame drops there are.
2092                    mPictureChannel = new QCamera3PicChannel(
2093                            mCameraHandle->camera_handle, mChannelHandle,
2094                            mCameraHandle->ops, captureResultCb,
2095                            &padding_info, this, newStream,
2096                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2097                            m_bIs4KVideo, isZsl, mMetadataChannel,
2098                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2099                    if (mPictureChannel == NULL) {
2100                        LOGE("allocation of channel failed");
2101                        pthread_mutex_unlock(&mMutex);
2102                        return -ENOMEM;
2103                    }
2104                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2105                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2106                    mPictureChannel->overrideYuvSize(
2107                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2108                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2109                    break;
2110
2111                default:
2112                    LOGE("not a supported format 0x%x", newStream->format);
2113                    break;
2114                }
2115            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2116                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2117            } else {
2118                LOGE("Error, Unknown stream type");
2119                pthread_mutex_unlock(&mMutex);
2120                return -EINVAL;
2121            }
2122
2123            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2124            if (channel != NULL && channel->isUBWCEnabled()) {
2125                cam_format_t fmt = channel->getStreamDefaultFormat(
2126                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2127                        newStream->width, newStream->height);
2128                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2129                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2130                }
2131            }
2132
2133            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2134                    it != mStreamInfo.end(); it++) {
2135                if ((*it)->stream == newStream) {
2136                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2137                    break;
2138                }
2139            }
2140        } else {
2141            // Channel already exists for this stream
2142            // Do nothing for now
2143        }
2144        padding_info = gCamCapability[mCameraId]->padding_info;
2145
2146        /* Do not add entries for input stream in metastream info
2147         * since there is no real stream associated with it
2148         */
2149        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2150            mStreamConfigInfo.num_streams++;
2151    }
2152
2153    //RAW DUMP channel
2154    if (mEnableRawDump && isRawStreamRequested == false){
2155        cam_dimension_t rawDumpSize;
2156        rawDumpSize = getMaxRawSize(mCameraId);
2157        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2158        setPAAFSupport(rawDumpFeatureMask,
2159                CAM_STREAM_TYPE_RAW,
2160                gCamCapability[mCameraId]->color_arrangement);
2161        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2162                                  mChannelHandle,
2163                                  mCameraHandle->ops,
2164                                  rawDumpSize,
2165                                  &padding_info,
2166                                  this, rawDumpFeatureMask);
2167        if (!mRawDumpChannel) {
2168            LOGE("Raw Dump channel cannot be created");
2169            pthread_mutex_unlock(&mMutex);
2170            return -ENOMEM;
2171        }
2172    }
2173
2174
2175    if (mAnalysisChannel) {
2176        cam_analysis_info_t analysisInfo;
2177        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2178        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2179                CAM_STREAM_TYPE_ANALYSIS;
2180        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2181                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2182        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2183                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2184                gCamCapability[mCameraId]->color_arrangement);
2185        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2186                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2187                &analysisInfo);
2188        if (rc != NO_ERROR) {
2189            LOGE("getAnalysisInfo failed, ret = %d", rc);
2190            pthread_mutex_unlock(&mMutex);
2191            return rc;
2192        }
2193        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2194                analysisInfo.analysis_max_res;
2195        mStreamConfigInfo.num_streams++;
2196    }
2197
2198    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2199        cam_analysis_info_t supportInfo;
2200        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2201        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2202        setPAAFSupport(callbackFeatureMask,
2203                CAM_STREAM_TYPE_CALLBACK,
2204                gCamCapability[mCameraId]->color_arrangement);
2205        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2206        if (rc != NO_ERROR) {
2207            LOGE("getAnalysisInfo failed, ret = %d", rc);
2208            pthread_mutex_unlock(&mMutex);
2209            return rc;
2210        }
2211        mSupportChannel = new QCamera3SupportChannel(
2212                mCameraHandle->camera_handle,
2213                mChannelHandle,
2214                mCameraHandle->ops,
2215                &gCamCapability[mCameraId]->padding_info,
2216                callbackFeatureMask,
2217                CAM_STREAM_TYPE_CALLBACK,
2218                &QCamera3SupportChannel::kDim,
2219                CAM_FORMAT_YUV_420_NV21,
2220                supportInfo.hw_analysis_supported,
2221                this, 0);
2222        if (!mSupportChannel) {
2223            LOGE("dummy channel cannot be created");
2224            pthread_mutex_unlock(&mMutex);
2225            return -ENOMEM;
2226        }
2227    }
2228
2229    if (mSupportChannel) {
2230        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2231                QCamera3SupportChannel::kDim;
2232        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2233                CAM_STREAM_TYPE_CALLBACK;
2234        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2235                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2236        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2237                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2238                gCamCapability[mCameraId]->color_arrangement);
2239        mStreamConfigInfo.num_streams++;
2240    }
2241
2242    if (mRawDumpChannel) {
2243        cam_dimension_t rawSize;
2244        rawSize = getMaxRawSize(mCameraId);
2245        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2246                rawSize;
2247        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2248                CAM_STREAM_TYPE_RAW;
2249        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2250                CAM_QCOM_FEATURE_NONE;
2251        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2252                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2253                gCamCapability[mCameraId]->color_arrangement);
2254        mStreamConfigInfo.num_streams++;
2255    }
2256    /* In HFR mode, if video stream is not added, create a dummy channel so that
2257     * ISP can create a batch mode even for preview only case. This channel is
2258     * never 'start'ed (no stream-on), it is only 'initialized'  */
2259    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2260            !m_bIsVideo) {
2261        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2262        setPAAFSupport(dummyFeatureMask,
2263                CAM_STREAM_TYPE_VIDEO,
2264                gCamCapability[mCameraId]->color_arrangement);
2265        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2266                mChannelHandle,
2267                mCameraHandle->ops, captureResultCb,
2268                &gCamCapability[mCameraId]->padding_info,
2269                this,
2270                &mDummyBatchStream,
2271                CAM_STREAM_TYPE_VIDEO,
2272                dummyFeatureMask,
2273                mMetadataChannel);
2274        if (NULL == mDummyBatchChannel) {
2275            LOGE("creation of mDummyBatchChannel failed."
2276                    "Preview will use non-hfr sensor mode ");
2277        }
2278    }
2279    if (mDummyBatchChannel) {
2280        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2281                mDummyBatchStream.width;
2282        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2283                mDummyBatchStream.height;
2284        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2285                CAM_STREAM_TYPE_VIDEO;
2286        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2287                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2288        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2289                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2290                gCamCapability[mCameraId]->color_arrangement);
2291        mStreamConfigInfo.num_streams++;
2292    }
2293
2294    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2295    mStreamConfigInfo.buffer_info.max_buffers =
2296            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2297
2298    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2299    for (pendingRequestIterator i = mPendingRequestsList.begin();
2300            i != mPendingRequestsList.end();) {
2301        i = erasePendingRequest(i);
2302    }
2303    mPendingFrameDropList.clear();
2304    // Initialize/Reset the pending buffers list
2305    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2306        req.mPendingBufferList.clear();
2307    }
2308    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2309
2310    mPendingReprocessResultList.clear();
2311
2312    mCurJpegMeta.clear();
2313    //Get min frame duration for this streams configuration
2314    deriveMinFrameDuration();
2315
2316    // Update state
2317    mState = CONFIGURED;
2318
2319    pthread_mutex_unlock(&mMutex);
2320
2321    return rc;
2322}
2323
2324/*===========================================================================
2325 * FUNCTION   : validateCaptureRequest
2326 *
2327 * DESCRIPTION: validate a capture request from camera service
2328 *
2329 * PARAMETERS :
2330 *   @request : request from framework to process
2331 *
 * RETURN     : NO_ERROR on success; BAD_VALUE if the request is malformed
2333 *
2334 *==========================================================================*/
2335int QCamera3HardwareInterface::validateCaptureRequest(
2336                    camera3_capture_request_t *request)
2337{
2338    ssize_t idx = 0;
2339    const camera3_stream_buffer_t *b;
2340    CameraMetadata meta;
2341
2342    /* Sanity check the request */
2343    if (request == NULL) {
2344        LOGE("NULL capture request");
2345        return BAD_VALUE;
2346    }
2347
2348    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2349        /*settings cannot be null for the first request*/
2350        return BAD_VALUE;
2351    }
2352
2353    uint32_t frameNumber = request->frame_number;
2354    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2355        LOGE("Request %d: No output buffers provided!",
2356                __FUNCTION__, frameNumber);
2357        return BAD_VALUE;
2358    }
2359    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2360        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2361                 request->num_output_buffers, MAX_NUM_STREAMS);
2362        return BAD_VALUE;
2363    }
2364    if (request->input_buffer != NULL) {
2365        b = request->input_buffer;
2366        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2367            LOGE("Request %d: Buffer %ld: Status not OK!",
2368                     frameNumber, (long)idx);
2369            return BAD_VALUE;
2370        }
2371        if (b->release_fence != -1) {
2372            LOGE("Request %d: Buffer %ld: Has a release fence!",
2373                     frameNumber, (long)idx);
2374            return BAD_VALUE;
2375        }
2376        if (b->buffer == NULL) {
2377            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2378                     frameNumber, (long)idx);
2379            return BAD_VALUE;
2380        }
2381    }
2382
2383    // Validate all buffers
2384    b = request->output_buffers;
2385    do {
2386        QCamera3ProcessingChannel *channel =
2387                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2388        if (channel == NULL) {
2389            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2390                     frameNumber, (long)idx);
2391            return BAD_VALUE;
2392        }
2393        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2394            LOGE("Request %d: Buffer %ld: Status not OK!",
2395                     frameNumber, (long)idx);
2396            return BAD_VALUE;
2397        }
2398        if (b->release_fence != -1) {
2399            LOGE("Request %d: Buffer %ld: Has a release fence!",
2400                     frameNumber, (long)idx);
2401            return BAD_VALUE;
2402        }
2403        if (b->buffer == NULL) {
2404            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2405                     frameNumber, (long)idx);
2406            return BAD_VALUE;
2407        }
2408        if (*(b->buffer) == NULL) {
2409            LOGE("Request %d: Buffer %ld: NULL private handle!",
2410                     frameNumber, (long)idx);
2411            return BAD_VALUE;
2412        }
2413        idx++;
2414        b = request->output_buffers + idx;
2415    } while (idx < (ssize_t)request->num_output_buffers);
2416
2417    return NO_ERROR;
2418}
2419
2420/*===========================================================================
2421 * FUNCTION   : deriveMinFrameDuration
2422 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2424 *              on currently configured streams.
2425 *
2426 * PARAMETERS : NONE
2427 *
2428 * RETURN     : NONE
2429 *
2430 *==========================================================================*/
2431void QCamera3HardwareInterface::deriveMinFrameDuration()
2432{
2433    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2434
2435    maxJpegDim = 0;
2436    maxProcessedDim = 0;
2437    maxRawDim = 0;
2438
2439    // Figure out maximum jpeg, processed, and raw dimensions
2440    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2441        it != mStreamInfo.end(); it++) {
2442
2443        // Input stream doesn't have valid stream_type
2444        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2445            continue;
2446
2447        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2448        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2449            if (dimension > maxJpegDim)
2450                maxJpegDim = dimension;
2451        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2452                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2453                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2454            if (dimension > maxRawDim)
2455                maxRawDim = dimension;
2456        } else {
2457            if (dimension > maxProcessedDim)
2458                maxProcessedDim = dimension;
2459        }
2460    }
2461
2462    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2463            MAX_SIZES_CNT);
2464
2465    //Assume all jpeg dimensions are in processed dimensions.
2466    if (maxJpegDim > maxProcessedDim)
2467        maxProcessedDim = maxJpegDim;
2468    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2469    if (maxProcessedDim > maxRawDim) {
2470        maxRawDim = INT32_MAX;
2471
2472        for (size_t i = 0; i < count; i++) {
2473            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2474                    gCamCapability[mCameraId]->raw_dim[i].height;
2475            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2476                maxRawDim = dimension;
2477        }
2478    }
2479
2480    //Find minimum durations for processed, jpeg, and raw
2481    for (size_t i = 0; i < count; i++) {
2482        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2483                gCamCapability[mCameraId]->raw_dim[i].height) {
2484            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2485            break;
2486        }
2487    }
2488    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2489    for (size_t i = 0; i < count; i++) {
2490        if (maxProcessedDim ==
2491                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2492                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2493            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2494            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2495            break;
2496        }
2497    }
2498}
2499
2500/*===========================================================================
2501 * FUNCTION   : getMinFrameDuration
2502 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the framework
 *
 * RETURN     : min frame duration for a particular request
2509 *
2510 *==========================================================================*/
2511int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2512{
2513    bool hasJpegStream = false;
2514    bool hasRawStream = false;
2515    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2516        const camera3_stream_t *stream = request->output_buffers[i].stream;
2517        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2518            hasJpegStream = true;
2519        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2520                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2521                stream->format == HAL_PIXEL_FORMAT_RAW16)
2522            hasRawStream = true;
2523    }
2524
2525    if (!hasJpegStream)
2526        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2527    else
2528        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2529}
2530
2531/*===========================================================================
2532 * FUNCTION   : handleBuffersDuringFlushLock
2533 *
2534 * DESCRIPTION: Account for buffers returned from back-end during flush
2535 *              This function is executed while mMutex is held by the caller.
2536 *
2537 * PARAMETERS :
2538 *   @buffer: image buffer for the callback
2539 *
2540 * RETURN     :
2541 *==========================================================================*/
2542void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2543{
2544    bool buffer_found = false;
2545    for (List<PendingBuffersInRequest>::iterator req =
2546            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2547            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2548        for (List<PendingBufferInfo>::iterator i =
2549                req->mPendingBufferList.begin();
2550                i != req->mPendingBufferList.end(); i++) {
2551            if (i->buffer == buffer->buffer) {
2552                mPendingBuffersMap.numPendingBufsAtFlush--;
2553                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2554                    buffer->buffer, req->frame_number,
2555                    mPendingBuffersMap.numPendingBufsAtFlush);
2556                buffer_found = true;
2557                break;
2558            }
2559        }
2560        if (buffer_found) {
2561            break;
2562        }
2563    }
2564    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2565        //signal the flush()
2566        LOGD("All buffers returned to HAL. Continue flush");
2567        pthread_cond_signal(&mBuffersCond);
2568    }
2569}
2570
2571
2572/*===========================================================================
2573 * FUNCTION   : handlePendingReprocResults
2574 *
2575 * DESCRIPTION: check and notify on any pending reprocess results
2576 *
2577 * PARAMETERS :
2578 *   @frame_number   : Pending request frame number
2579 *
2580 * RETURN     : int32_t type of status
2581 *              NO_ERROR  -- success
2582 *              none-zero failure code
2583 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result queued for this frame number and,
    // if found, deliver its notify message and capture result to the framework.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Framework contract: notify() must be sent before the matching
            // process_capture_result() for the same frame.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the outgoing result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // Reprocess results are delivered complete in one callback.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    erasePendingRequest(k);
                    break;
                }
            }
            // Safe erase: iterator j is not dereferenced after this point.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // Always reports success, including when no matching entry exists.
    return NO_ERROR;
}
2622
2623/*===========================================================================
2624 * FUNCTION   : handleBatchMetadata
2625 *
2626 * DESCRIPTION: Handles metadata buffer callback in batch mode
2627 *
2628 * PARAMETERS : @metadata_buf: metadata buffer
2629 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2630 *                 the meta buf in this method
2631 *
2632 * RETURN     :
2633 *
2634 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pointers into the metadata buffer for the batch-terminating frame info.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Deliberately no early return: the loop below still runs once
        // (loopCount == 1) so handleMetadataWithLock can update its
        // pipeline-depth bookkeeping.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the last urgent frame number of the batch back to the first
        // one recorded when the batch was queued; the difference is the
        // number of urgent results to synthesize.
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // This batch is now fully accounted for; drop its map entry.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGH("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        // A diff larger than the HFR batch size indicates inconsistent
        // bookkeeping; log loudly but continue.
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    // Expand the single batch metadata into one per-frame result, rewriting
    // the frame number and timestamp in place before each dispatch.
    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer this frame's timestamp by spacing frames evenly at
                // the HFR video rate, ending at the batch timestamp.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGH("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // mMutex is taken per iteration so the framework callback path inside
        // handleMetadataWithLock runs under the lock, then releases it before
        // the next frame is synthesized.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2784
2785void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2786        camera3_error_msg_code_t errorCode)
2787{
2788    camera3_notify_msg_t notify_msg;
2789    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2790    notify_msg.type = CAMERA3_MSG_ERROR;
2791    notify_msg.message.error.error_code = errorCode;
2792    notify_msg.message.error.error_stream = NULL;
2793    notify_msg.message.error.frame_number = frameNumber;
2794    mCallbackOps->notify(mCallbackOps, &notify_msg);
2795
2796    return;
2797}
2798/*===========================================================================
2799 * FUNCTION   : handleMetadataWithLock
2800 *
2801 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2802 *
2803 * PARAMETERS : @metadata_buf: metadata buffer
2804 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2805 *                 the meta buf in this method
2806 *              @firstMetadataInBatch: Boolean to indicate whether this is the
2807 *                  first metadata in a batch. Valid only for batch mode
2808 *
2809 * RETURN     :
2810 *
2811 *==========================================================================*/
2812void QCamera3HardwareInterface::handleMetadataWithLock(
2813    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2814    bool firstMetadataInBatch)
2815{
2816    ATRACE_CALL();
2817    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2818        //during flush do not send metadata from this thread
2819        LOGD("not sending metadata during flush or when mState is error");
2820        if (free_and_bufdone_meta_buf) {
2821            mMetadataChannel->bufDone(metadata_buf);
2822            free(metadata_buf);
2823        }
2824        return;
2825    }
2826
2827    //not in flush
2828    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2829    int32_t frame_number_valid, urgent_frame_number_valid;
2830    uint32_t frame_number, urgent_frame_number;
2831    int64_t capture_time;
2832    nsecs_t currentSysTime;
2833
2834    int32_t *p_frame_number_valid =
2835            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2836    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2837    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2838    int32_t *p_urgent_frame_number_valid =
2839            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2840    uint32_t *p_urgent_frame_number =
2841            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2842    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2843            metadata) {
2844        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2845                 *p_frame_number_valid, *p_frame_number);
2846    }
2847
2848    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2849            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2850        LOGE("Invalid metadata");
2851        if (free_and_bufdone_meta_buf) {
2852            mMetadataChannel->bufDone(metadata_buf);
2853            free(metadata_buf);
2854        }
2855        goto done_metadata;
2856    }
2857    frame_number_valid =        *p_frame_number_valid;
2858    frame_number =              *p_frame_number;
2859    capture_time =              *p_capture_time;
2860    urgent_frame_number_valid = *p_urgent_frame_number_valid;
2861    urgent_frame_number =       *p_urgent_frame_number;
2862    currentSysTime =            systemTime(CLOCK_MONOTONIC);
2863
2864    // Detect if buffers from any requests are overdue
2865    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2866        if ( (currentSysTime - req.timestamp) >
2867            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2868            for (auto &missed : req.mPendingBufferList) {
2869                assert(missed.stream->priv);
2870                if (missed.stream->priv) {
2871                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
2872                    assert(ch->mStreams[0]);
2873                    if (ch->mStreams[0]) {
2874                        LOGW("Missing: frame = %d, buffer = %p,"
2875                            "stream type = %d, stream format = %d",
2876                            req.frame_number, missed.buffer,
2877                            ch->mStreams[0]->getMyType(), missed.stream->format);
2878                    }
2879                }
2880            }
2881        }
2882    }
2883    //Partial result on process_capture_result for timestamp
2884    if (urgent_frame_number_valid) {
2885        LOGD("valid urgent frame_number = %u, capture_time = %lld",
2886           urgent_frame_number, capture_time);
2887
2888        //Recieved an urgent Frame Number, handle it
2889        //using partial results
2890        for (pendingRequestIterator i =
2891                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2892            LOGD("Iterator Frame = %d urgent frame = %d",
2893                 i->frame_number, urgent_frame_number);
2894
2895            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2896                (i->partial_result_cnt == 0)) {
2897                LOGE("Error: HAL missed urgent metadata for frame number %d",
2898                         i->frame_number);
2899            }
2900
2901            if (i->frame_number == urgent_frame_number &&
2902                     i->bUrgentReceived == 0) {
2903
2904                camera3_capture_result_t result;
2905                memset(&result, 0, sizeof(camera3_capture_result_t));
2906
2907                i->partial_result_cnt++;
2908                i->bUrgentReceived = 1;
2909                // Extract 3A metadata
2910                result.result =
2911                    translateCbUrgentMetadataToResultMetadata(metadata);
2912                // Populate metadata result
2913                result.frame_number = urgent_frame_number;
2914                result.num_output_buffers = 0;
2915                result.output_buffers = NULL;
2916                result.partial_result = i->partial_result_cnt;
2917
2918                mCallbackOps->process_capture_result(mCallbackOps, &result);
2919                LOGD("urgent frame_number = %u, capture_time = %lld",
2920                      result.frame_number, capture_time);
2921                free_camera_metadata((camera_metadata_t *)result.result);
2922                break;
2923            }
2924        }
2925    }
2926
2927    if (!frame_number_valid) {
2928        LOGD("Not a valid normal frame number, used as SOF only");
2929        if (free_and_bufdone_meta_buf) {
2930            mMetadataChannel->bufDone(metadata_buf);
2931            free(metadata_buf);
2932        }
2933        goto done_metadata;
2934    }
2935    LOGH("valid frame_number = %u, capture_time = %lld",
2936            frame_number, capture_time);
2937
2938    for (pendingRequestIterator i = mPendingRequestsList.begin();
2939            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2940        // Flush out all entries with less or equal frame numbers.
2941
2942        camera3_capture_result_t result;
2943        memset(&result, 0, sizeof(camera3_capture_result_t));
2944
2945        LOGD("frame_number in the list is %u", i->frame_number);
2946        i->partial_result_cnt++;
2947        result.partial_result = i->partial_result_cnt;
2948
2949        // Check whether any stream buffer corresponding to this is dropped or not
2950        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2951        // The API does not expect a blob buffer to be dropped
2952        if (p_cam_frame_drop) {
2953            /* Clear notify_msg structure */
2954            camera3_notify_msg_t notify_msg;
2955            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2956            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2957                    j != i->buffers.end(); j++) {
2958                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2959                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2960                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
2961                    if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
2962                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2963                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2964                                __func__, i->frame_number, streamID, j->stream->format);
2965                        notify_msg.type = CAMERA3_MSG_ERROR;
2966                        notify_msg.message.error.frame_number = i->frame_number;
2967                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2968                        notify_msg.message.error.error_stream = j->stream;
2969                        mCallbackOps->notify(mCallbackOps, &notify_msg);
2970                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2971                                __func__, i->frame_number, streamID, j->stream->format);
2972                        PendingFrameDropInfo PendingFrameDrop;
2973                        PendingFrameDrop.frame_number=i->frame_number;
2974                        PendingFrameDrop.stream_ID = streamID;
2975                        // Add the Frame drop info to mPendingFrameDropList
2976                        mPendingFrameDropList.push_back(PendingFrameDrop);
2977                   }
2978               }
2979            }
2980        }
2981
2982        // Send empty metadata with already filled buffers for dropped metadata
2983        // and send valid metadata with already filled buffers for current metadata
2984        /* we could hit this case when we either
2985         * 1. have a pending reprocess request or
2986         * 2. miss a metadata buffer callback */
2987        if (i->frame_number < frame_number) {
2988            if (i->input_buffer) {
2989                /* this will be handled in handleInputBufferWithLock */
2990                i++;
2991                continue;
2992            } else if (mBatchSize) {
2993
2994                mPendingLiveRequest--;
2995
2996                CameraMetadata dummyMetadata;
2997                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
2998                result.result = dummyMetadata.release();
2999
3000                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
3001            } else {
3002                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
3003                if (free_and_bufdone_meta_buf) {
3004                    mMetadataChannel->bufDone(metadata_buf);
3005                    free(metadata_buf);
3006                }
3007                mState = ERROR;
3008                goto done_metadata;
3009            }
3010        } else {
3011            mPendingLiveRequest--;
3012            /* Clear notify_msg structure */
3013            camera3_notify_msg_t notify_msg;
3014            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3015
3016            // Send shutter notify to frameworks
3017            notify_msg.type = CAMERA3_MSG_SHUTTER;
3018            notify_msg.message.shutter.frame_number = i->frame_number;
3019            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3020            mCallbackOps->notify(mCallbackOps, &notify_msg);
3021
3022            i->timestamp = capture_time;
3023
3024            // Find channel requiring metadata, meaning internal offline postprocess
3025            // is needed.
3026            //TODO: for now, we don't support two streams requiring metadata at the same time.
3027            // (because we are not making copies, and metadata buffer is not reference counted.
3028            bool internalPproc = false;
3029            for (pendingBufferIterator iter = i->buffers.begin();
3030                    iter != i->buffers.end(); iter++) {
3031                if (iter->need_metadata) {
3032                    internalPproc = true;
3033                    QCamera3ProcessingChannel *channel =
3034                            (QCamera3ProcessingChannel *)iter->stream->priv;
3035                    channel->queueReprocMetadata(metadata_buf);
3036                    break;
3037                }
3038            }
3039
3040            // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
3041            result.result = translateFromHalMetadata(metadata,
3042                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
3043                    i->capture_intent, i->hybrid_ae_enable,
3044                     /* DevCamDebug metadata translateFromHalMetadata function call*/
3045                    i->DevCamDebug_meta_enable,
3046                    /* DevCamDebug metadata end */
3047                    internalPproc, i->fwkCacMode,
3048                    firstMetadataInBatch);
3049            // atrace_end(ATRACE_TAG_ALWAYS);
3050
3051            saveExifParams(metadata);
3052
3053            if (i->blob_request) {
3054                {
3055                    //Dump tuning metadata if enabled and available
3056                    char prop[PROPERTY_VALUE_MAX];
3057                    memset(prop, 0, sizeof(prop));
3058                    property_get("persist.camera.dumpmetadata", prop, "0");
3059                    int32_t enabled = atoi(prop);
3060                    if (enabled && metadata->is_tuning_params_valid) {
3061                        dumpMetadataToFile(metadata->tuning_params,
3062                               mMetaFrameCount,
3063                               enabled,
3064                               "Snapshot",
3065                               frame_number);
3066                    }
3067                }
3068            }
3069
3070            if (!internalPproc) {
3071                LOGD("couldn't find need_metadata for this metadata");
3072                // Return metadata buffer
3073                if (free_and_bufdone_meta_buf) {
3074                    mMetadataChannel->bufDone(metadata_buf);
3075                    free(metadata_buf);
3076                }
3077            }
3078        }
3079        if (!result.result) {
3080            LOGE("metadata is NULL");
3081        }
3082        result.frame_number = i->frame_number;
3083        result.input_buffer = i->input_buffer;
3084        result.num_output_buffers = 0;
3085        result.output_buffers = NULL;
3086        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3087                    j != i->buffers.end(); j++) {
3088            if (j->buffer) {
3089                result.num_output_buffers++;
3090            }
3091        }
3092
3093        updateFpsInPreviewBuffer(metadata, i->frame_number);
3094
3095        if (result.num_output_buffers > 0) {
3096            camera3_stream_buffer_t *result_buffers =
3097                new camera3_stream_buffer_t[result.num_output_buffers];
3098            if (result_buffers != NULL) {
3099                size_t result_buffers_idx = 0;
3100                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3101                        j != i->buffers.end(); j++) {
3102                    if (j->buffer) {
3103                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3104                                m != mPendingFrameDropList.end(); m++) {
3105                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3106                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3107                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3108                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3109                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3110                                        frame_number, streamID);
3111                                m = mPendingFrameDropList.erase(m);
3112                                break;
3113                            }
3114                        }
3115                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
3116                        result_buffers[result_buffers_idx++] = *(j->buffer);
3117                        free(j->buffer);
3118                        j->buffer = NULL;
3119                    }
3120                }
3121                result.output_buffers = result_buffers;
3122                mCallbackOps->process_capture_result(mCallbackOps, &result);
3123                LOGD("meta frame_number = %u, capture_time = %lld",
3124                        result.frame_number, i->timestamp);
3125                free_camera_metadata((camera_metadata_t *)result.result);
3126                delete[] result_buffers;
3127            }else {
3128                LOGE("Fatal error: out of memory");
3129            }
3130        } else {
3131            mCallbackOps->process_capture_result(mCallbackOps, &result);
3132            LOGD("meta frame_number = %u, capture_time = %lld",
3133                    result.frame_number, i->timestamp);
3134            free_camera_metadata((camera_metadata_t *)result.result);
3135        }
3136
3137        i = erasePendingRequest(i);
3138
3139        if (!mPendingReprocessResultList.empty()) {
3140            handlePendingReprocResults(frame_number + 1);
3141        }
3142    }
3143
3144done_metadata:
3145    for (pendingRequestIterator i = mPendingRequestsList.begin();
3146            i != mPendingRequestsList.end() ;i++) {
3147        i->pipeline_depth++;
3148    }
3149    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3150    unblockRequestIfNecessary();
3151}
3152
3153/*===========================================================================
3154 * FUNCTION   : hdrPlusPerfLock
3155 *
3156 * DESCRIPTION: perf lock for HDR+ using custom intent
3157 *
3158 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3159 *
3160 * RETURN     : None
3161 *
3162 *==========================================================================*/
3163void QCamera3HardwareInterface::hdrPlusPerfLock(
3164        mm_camera_super_buf_t *metadata_buf)
3165{
3166    if (NULL == metadata_buf) {
3167        LOGE("metadata_buf is NULL");
3168        return;
3169    }
3170    metadata_buffer_t *metadata =
3171            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3172    int32_t *p_frame_number_valid =
3173            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3174    uint32_t *p_frame_number =
3175            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3176
3177    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3178        LOGE("%s: Invalid metadata", __func__);
3179        return;
3180    }
3181
3182    //acquire perf lock for 5 sec after the last HDR frame is captured
3183    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3184        if ((p_frame_number != NULL) &&
3185                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3186            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3187        }
3188    }
3189
3190    //release lock after perf lock timer is expired. If lock is already released,
3191    //isTimerReset returns false
3192    if (m_perfLock.isTimerReset()) {
3193        mLastCustIntentFrmNum = -1;
3194        m_perfLock.lock_rel_timed();
3195    }
3196}
3197
3198/*===========================================================================
3199 * FUNCTION   : handleInputBufferWithLock
3200 *
3201 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3202 *
3203 * PARAMETERS : @frame_number: frame number of the input buffer
3204 *
3205 * RETURN     :
3206 *
3207 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request entry for this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notification exactly once per request. The shutter
        // timestamp is taken from ANDROID_SENSOR_TIMESTAMP in the request
        // settings when present; otherwise the current monotonic time is used.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on (and then close) the input buffer's release fence before
        // returning the buffer to the framework. TIMEOUT_NEVER: block until
        // the producer signals the fence.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Deliver the final result for the reprocess request: the original
        // settings as the result metadata, the input buffer, and the full
        // partial-result count to mark the result as complete.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Remove the completed request from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3265
3266/*===========================================================================
3267 * FUNCTION   : handleBufferWithLock
3268 *
3269 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3270 *
3271 * PARAMETERS : @buffer: image buffer for the callback
3272 *              @frame_number: frame number of the image buffer
3273 *
3274 * RETURN     :
3275 *
3276 *==========================================================================*/
3277void QCamera3HardwareInterface::handleBufferWithLock(
3278    camera3_stream_buffer_t *buffer, uint32_t frame_number)
3279{
3280    ATRACE_CALL();
3281    /* Nothing to be done during error state */
3282    if ((ERROR == mState) || (DEINIT == mState)) {
3283        return;
3284    }
3285    if (mFlushPerf) {
3286        handleBuffersDuringFlushLock(buffer);
3287        return;
3288    }
3289    //not in flush
3290    // If the frame number doesn't exist in the pending request list,
3291    // directly send the buffer to the frameworks, and update pending buffers map
3292    // Otherwise, book-keep the buffer.
3293    pendingRequestIterator i = mPendingRequestsList.begin();
3294    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3295        i++;
3296    }
3297    if (i == mPendingRequestsList.end()) {
3298        // Verify all pending requests frame_numbers are greater
3299        for (pendingRequestIterator j = mPendingRequestsList.begin();
3300                j != mPendingRequestsList.end(); j++) {
3301            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3302                LOGW("Error: pending live frame number %d is smaller than %d",
3303                         j->frame_number, frame_number);
3304            }
3305        }
3306        camera3_capture_result_t result;
3307        memset(&result, 0, sizeof(camera3_capture_result_t));
3308        result.result = NULL;
3309        result.frame_number = frame_number;
3310        result.num_output_buffers = 1;
3311        result.partial_result = 0;
3312        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3313                m != mPendingFrameDropList.end(); m++) {
3314            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3315            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3316            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3317                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3318                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3319                         frame_number, streamID);
3320                m = mPendingFrameDropList.erase(m);
3321                break;
3322            }
3323        }
3324        result.output_buffers = buffer;
3325        LOGH("result frame_number = %d, buffer = %p",
3326                 frame_number, buffer->buffer);
3327
3328        mPendingBuffersMap.removeBuf(buffer->buffer);
3329
3330        mCallbackOps->process_capture_result(mCallbackOps, &result);
3331    } else {
3332        if (i->input_buffer) {
3333            CameraMetadata settings;
3334            camera3_notify_msg_t notify_msg;
3335            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3336            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3337            if(i->settings) {
3338                settings = i->settings;
3339                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3340                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3341                } else {
3342                    LOGW("No timestamp in input settings! Using current one.");
3343                }
3344            } else {
3345                LOGE("Input settings missing!");
3346            }
3347
3348            notify_msg.type = CAMERA3_MSG_SHUTTER;
3349            notify_msg.message.shutter.frame_number = frame_number;
3350            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3351
3352            if (i->input_buffer->release_fence != -1) {
3353               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3354               close(i->input_buffer->release_fence);
3355               if (rc != OK) {
3356                   LOGE("input buffer sync wait failed %d", rc);
3357               }
3358            }
3359            mPendingBuffersMap.removeBuf(buffer->buffer);
3360
3361            bool notifyNow = true;
3362            for (pendingRequestIterator j = mPendingRequestsList.begin();
3363                    j != mPendingRequestsList.end(); j++) {
3364                if (j->frame_number < frame_number) {
3365                    notifyNow = false;
3366                    break;
3367                }
3368            }
3369
3370            if (notifyNow) {
3371                camera3_capture_result result;
3372                memset(&result, 0, sizeof(camera3_capture_result));
3373                result.frame_number = frame_number;
3374                result.result = i->settings;
3375                result.input_buffer = i->input_buffer;
3376                result.num_output_buffers = 1;
3377                result.output_buffers = buffer;
3378                result.partial_result = PARTIAL_RESULT_COUNT;
3379
3380                mCallbackOps->notify(mCallbackOps, &notify_msg);
3381                mCallbackOps->process_capture_result(mCallbackOps, &result);
3382                LOGD("Notify reprocess now %d!", frame_number);
3383                i = erasePendingRequest(i);
3384            } else {
3385                // Cache reprocess result for later
3386                PendingReprocessResult pendingResult;
3387                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3388                pendingResult.notify_msg = notify_msg;
3389                pendingResult.buffer = *buffer;
3390                pendingResult.frame_number = frame_number;
3391                mPendingReprocessResultList.push_back(pendingResult);
3392                LOGD("Cache reprocess result %d!", frame_number);
3393            }
3394        } else {
3395            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3396                j != i->buffers.end(); j++) {
3397                if (j->stream == buffer->stream) {
3398                    if (j->buffer != NULL) {
3399                        LOGE("Error: buffer is already set");
3400                    } else {
3401                        j->buffer = (camera3_stream_buffer_t *)malloc(
3402                            sizeof(camera3_stream_buffer_t));
3403                        *(j->buffer) = *buffer;
3404                        LOGH("cache buffer %p at result frame_number %u",
3405                             buffer->buffer, frame_number);
3406                    }
3407                }
3408            }
3409        }
3410    }
3411}
3412
3413/*===========================================================================
3414 * FUNCTION   : unblockRequestIfNecessary
3415 *
3416 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3417 *              that mMutex is held when this function is called.
3418 *
3419 * PARAMETERS :
3420 *
3421 * RETURN     :
3422 *
3423 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request, which may be waiting on mRequestCond
   // for in-flight requests to drain. Per the function header, mMutex is
   // already held by the caller, so the signal cannot race the waiter's
   // predicate re-check.
   pthread_cond_signal(&mRequestCond);
}
3429
3430
3431/*===========================================================================
3432 * FUNCTION   : processCaptureRequest
3433 *
3434 * DESCRIPTION: process a capture request from camera service
3435 *
3436 * PARAMETERS :
3437 *   @request : request from framework to process
3438 *
3439 * RETURN     :
3440 *
3441 *==========================================================================*/
3442int QCamera3HardwareInterface::processCaptureRequest(
3443                    camera3_capture_request_t *request)
3444{
3445    ATRACE_CALL();
3446    int rc = NO_ERROR;
3447    int32_t request_id;
3448    CameraMetadata meta;
3449    bool isVidBufRequested = false;
3450    camera3_stream_buffer_t *pInputBuffer = NULL;
3451
3452    pthread_mutex_lock(&mMutex);
3453
3454    // Validate current state
3455    switch (mState) {
3456        case CONFIGURED:
3457        case STARTED:
3458            /* valid state */
3459            break;
3460
3461        case ERROR:
3462            pthread_mutex_unlock(&mMutex);
3463            handleCameraDeviceError();
3464            return -ENODEV;
3465
3466        default:
3467            LOGE("Invalid state %d", mState);
3468            pthread_mutex_unlock(&mMutex);
3469            return -ENODEV;
3470    }
3471
3472    rc = validateCaptureRequest(request);
3473    if (rc != NO_ERROR) {
3474        LOGE("incoming request is not valid");
3475        pthread_mutex_unlock(&mMutex);
3476        return rc;
3477    }
3478
3479    meta = request->settings;
3480
3481    // For first capture request, send capture intent, and
3482    // stream on all streams
3483    if (mState == CONFIGURED) {
3484        // send an unconfigure to the backend so that the isp
3485        // resources are deallocated
3486        if (!mFirstConfiguration) {
3487            cam_stream_size_info_t stream_config_info;
3488            int32_t hal_version = CAM_HAL_V3;
3489            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3490            stream_config_info.buffer_info.min_buffers =
3491                    MIN_INFLIGHT_REQUESTS;
3492            stream_config_info.buffer_info.max_buffers =
3493                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3494            clear_metadata_buffer(mParameters);
3495            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3496                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3497            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3498                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3499            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3500                    mParameters);
3501            if (rc < 0) {
3502                LOGE("set_parms for unconfigure failed");
3503                pthread_mutex_unlock(&mMutex);
3504                return rc;
3505            }
3506        }
3507        m_perfLock.lock_acq();
3508        /* get eis information for stream configuration */
3509        cam_is_type_t is_type;
3510        char is_type_value[PROPERTY_VALUE_MAX];
3511        property_get("persist.camera.is_type", is_type_value, "0");
3512        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3513
3514        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3515            int32_t hal_version = CAM_HAL_V3;
3516            uint8_t captureIntent =
3517                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3518            mCaptureIntent = captureIntent;
3519            clear_metadata_buffer(mParameters);
3520            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3521            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3522        }
3523
3524        //If EIS is enabled, turn it on for video
3525        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3526        int32_t vsMode;
3527        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3528        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3529            rc = BAD_VALUE;
3530        }
3531
3532        //IS type will be 0 unless EIS is supported. If EIS is supported
3533        //it could either be 1 or 4 depending on the stream and video size
3534        if (setEis) {
3535            if (!m_bEisSupportedSize) {
3536                is_type = IS_TYPE_DIS;
3537            } else {
3538                is_type = IS_TYPE_EIS_2_0;
3539            }
3540            mStreamConfigInfo.is_type = is_type;
3541        } else {
3542            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3543        }
3544
3545        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3546                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3547        int32_t tintless_value = 1;
3548        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3549                CAM_INTF_PARM_TINTLESS, tintless_value);
3550        //Disable CDS for HFR mode or if DIS/EIS is on.
3551        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3552        //after every configure_stream
3553        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3554                (m_bIsVideo)) {
3555            int32_t cds = CAM_CDS_MODE_OFF;
3556            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3557                    CAM_INTF_PARM_CDS_MODE, cds))
3558                LOGE("Failed to disable CDS for HFR mode");
3559
3560        }
3561        setMobicat();
3562
3563        /* Set fps and hfr mode while sending meta stream info so that sensor
3564         * can configure appropriate streaming mode */
3565        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3566        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3567        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3568        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3569            rc = setHalFpsRange(meta, mParameters);
3570            if (rc == NO_ERROR) {
3571                int32_t max_fps =
3572                    (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3573                if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3574                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3575                }
3576                /* For HFR, more buffers are dequeued upfront to improve the performance */
3577                if (mBatchSize) {
3578                    mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3579                    mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3580                }
3581            }
3582            else {
3583                LOGE("setHalFpsRange failed");
3584            }
3585        }
3586        if (meta.exists(ANDROID_CONTROL_MODE)) {
3587            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3588            rc = extractSceneMode(meta, metaMode, mParameters);
3589            if (rc != NO_ERROR) {
3590                LOGE("extractSceneMode failed");
3591            }
3592        }
3593        memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3594
3595        //TODO: validate the arguments, HSV scenemode should have only the
3596        //advertised fps ranges
3597
        /* Set the capture intent, HAL version, tintless, stream info,
         * and disable parameters to the backend */
3600        LOGD("set_parms META_STREAM_INFO " );
3601        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3602            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3603                    "Format:%d",
3604                    mStreamConfigInfo.type[i],
3605                    mStreamConfigInfo.stream_sizes[i].width,
3606                    mStreamConfigInfo.stream_sizes[i].height,
3607                    mStreamConfigInfo.postprocess_mask[i],
3608                    mStreamConfigInfo.format[i]);
3609        }
3610        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3611                    mParameters);
3612        if (rc < 0) {
3613            LOGE("set_parms failed for hal version, stream info");
3614        }
3615
3616        cam_dimension_t sensor_dim;
3617        memset(&sensor_dim, 0, sizeof(sensor_dim));
3618        rc = getSensorOutputSize(sensor_dim);
3619        if (rc != NO_ERROR) {
3620            LOGE("Failed to get sensor output size");
3621            pthread_mutex_unlock(&mMutex);
3622            goto error_exit;
3623        }
3624
3625        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3626                gCamCapability[mCameraId]->active_array_size.height,
3627                sensor_dim.width, sensor_dim.height);
3628
3629        /* Set batchmode before initializing channel. Since registerBuffer
3630         * internally initializes some of the channels, better set batchmode
3631         * even before first register buffer */
3632        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3633            it != mStreamInfo.end(); it++) {
3634            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3635            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3636                    && mBatchSize) {
3637                rc = channel->setBatchSize(mBatchSize);
3638                //Disable per frame map unmap for HFR/batchmode case
3639                rc |= channel->setPerFrameMapUnmap(false);
3640                if (NO_ERROR != rc) {
3641                    LOGE("Channel init failed %d", rc);
3642                    pthread_mutex_unlock(&mMutex);
3643                    goto error_exit;
3644                }
3645            }
3646        }
3647
3648        //First initialize all streams
3649        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3650            it != mStreamInfo.end(); it++) {
3651            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3652            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3653               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3654               setEis)
3655                rc = channel->initialize(is_type);
3656            else {
3657                rc = channel->initialize(IS_TYPE_NONE);
3658            }
3659            if (NO_ERROR != rc) {
3660                LOGE("Channel initialization failed %d", rc);
3661                pthread_mutex_unlock(&mMutex);
3662                goto error_exit;
3663            }
3664        }
3665
3666        if (mRawDumpChannel) {
3667            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3668            if (rc != NO_ERROR) {
3669                LOGE("Error: Raw Dump Channel init failed");
3670                pthread_mutex_unlock(&mMutex);
3671                goto error_exit;
3672            }
3673        }
3674        if (mSupportChannel) {
3675            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3676            if (rc < 0) {
3677                LOGE("Support channel initialization failed");
3678                pthread_mutex_unlock(&mMutex);
3679                goto error_exit;
3680            }
3681        }
3682        if (mAnalysisChannel) {
3683            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3684            if (rc < 0) {
3685                LOGE("Analysis channel initialization failed");
3686                pthread_mutex_unlock(&mMutex);
3687                goto error_exit;
3688            }
3689        }
3690        if (mDummyBatchChannel) {
3691            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3692            if (rc < 0) {
3693                LOGE("mDummyBatchChannel setBatchSize failed");
3694                pthread_mutex_unlock(&mMutex);
3695                goto error_exit;
3696            }
3697            rc = mDummyBatchChannel->initialize(is_type);
3698            if (rc < 0) {
3699                LOGE("mDummyBatchChannel initialization failed");
3700                pthread_mutex_unlock(&mMutex);
3701                goto error_exit;
3702            }
3703        }
3704
3705        // Set bundle info
3706        rc = setBundleInfo();
3707        if (rc < 0) {
3708            LOGE("setBundleInfo failed %d", rc);
3709            pthread_mutex_unlock(&mMutex);
3710            goto error_exit;
3711        }
3712
3713        //update settings from app here
3714        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3715            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3716            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3717        }
3718        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3719            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3720            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3721        }
3722        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3723            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3724            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3725
3726            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3727                (mLinkedCameraId != mCameraId) ) {
3728                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3729                    mLinkedCameraId, mCameraId);
3730                goto error_exit;
3731            }
3732        }
3733
3734        // add bundle related cameras
3735        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3736        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3737            if (mIsDeviceLinked)
3738                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3739            else
3740                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3741
3742            pthread_mutex_lock(&gCamLock);
3743
3744            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3745                LOGE("Dualcam: Invalid Session Id ");
3746                pthread_mutex_unlock(&gCamLock);
3747                goto error_exit;
3748            }
3749
3750            if (mIsMainCamera == 1) {
3751                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3752                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3753                // related session id should be session id of linked session
3754                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3755            } else {
3756                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3757                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3758                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3759            }
3760            pthread_mutex_unlock(&gCamLock);
3761
3762            rc = mCameraHandle->ops->sync_related_sensors(
3763                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3764            if (rc < 0) {
3765                LOGE("Dualcam: link failed");
3766                goto error_exit;
3767            }
3768        }
3769
3770        //Then start them.
3771        LOGH("Start META Channel");
3772        rc = mMetadataChannel->start();
3773        if (rc < 0) {
3774            LOGE("META channel start failed");
3775            pthread_mutex_unlock(&mMutex);
3776            goto error_exit;
3777        }
3778
3779        if (mAnalysisChannel) {
3780            rc = mAnalysisChannel->start();
3781            if (rc < 0) {
3782                LOGE("Analysis channel start failed");
3783                mMetadataChannel->stop();
3784                pthread_mutex_unlock(&mMutex);
3785                goto error_exit;
3786            }
3787        }
3788
3789        if (mSupportChannel) {
3790            rc = mSupportChannel->start();
3791            if (rc < 0) {
3792                LOGE("Support channel start failed");
3793                mMetadataChannel->stop();
3794                /* Although support and analysis are mutually exclusive today
                   adding it in any case for future proofing */
3796                if (mAnalysisChannel) {
3797                    mAnalysisChannel->stop();
3798                }
3799                pthread_mutex_unlock(&mMutex);
3800                goto error_exit;
3801            }
3802        }
3803        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3804            it != mStreamInfo.end(); it++) {
3805            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3806            LOGH("Start Processing Channel mask=%d",
3807                     channel->getStreamTypeMask());
3808            rc = channel->start();
3809            if (rc < 0) {
3810                LOGE("channel start failed");
3811                pthread_mutex_unlock(&mMutex);
3812                goto error_exit;
3813            }
3814        }
3815
3816        if (mRawDumpChannel) {
3817            LOGD("Starting raw dump stream");
3818            rc = mRawDumpChannel->start();
3819            if (rc != NO_ERROR) {
3820                LOGE("Error Starting Raw Dump Channel");
3821                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3822                      it != mStreamInfo.end(); it++) {
3823                    QCamera3Channel *channel =
3824                        (QCamera3Channel *)(*it)->stream->priv;
3825                    LOGH("Stopping Processing Channel mask=%d",
3826                        channel->getStreamTypeMask());
3827                    channel->stop();
3828                }
3829                if (mSupportChannel)
3830                    mSupportChannel->stop();
3831                if (mAnalysisChannel) {
3832                    mAnalysisChannel->stop();
3833                }
3834                mMetadataChannel->stop();
3835                pthread_mutex_unlock(&mMutex);
3836                goto error_exit;
3837            }
3838        }
3839
3840        if (mChannelHandle) {
3841
3842            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3843                    mChannelHandle);
3844            if (rc != NO_ERROR) {
3845                LOGE("start_channel failed %d", rc);
3846                pthread_mutex_unlock(&mMutex);
3847                goto error_exit;
3848            }
3849        }
3850
3851        goto no_error;
3852error_exit:
3853        m_perfLock.lock_rel();
3854        return rc;
3855no_error:
3856        m_perfLock.lock_rel();
3857
3858        mWokenUpByDaemon = false;
3859        mPendingLiveRequest = 0;
3860        mFirstConfiguration = false;
3861        enablePowerHint();
3862    }
3863
3864    uint32_t frameNumber = request->frame_number;
3865    cam_stream_ID_t streamsArray;
3866
3867    if (mFlushPerf) {
3868        //we cannot accept any requests during flush
3869        LOGE("process_capture_request cannot proceed during flush");
3870        pthread_mutex_unlock(&mMutex);
3871        return NO_ERROR; //should return an error
3872    }
3873
3874    if (meta.exists(ANDROID_REQUEST_ID)) {
3875        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3876        mCurrentRequestId = request_id;
3877        LOGD("Received request with id: %d", request_id);
3878    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3879        LOGE("Unable to find request id field, \
3880                & no previous id available");
3881        pthread_mutex_unlock(&mMutex);
3882        return NAME_NOT_FOUND;
3883    } else {
3884        LOGD("Re-using old request id");
3885        request_id = mCurrentRequestId;
3886    }
3887
3888    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3889                                    request->num_output_buffers,
3890                                    request->input_buffer,
3891                                    frameNumber);
3892    // Acquire all request buffers first
3893    streamsArray.num_streams = 0;
3894    int blob_request = 0;
3895    uint32_t snapshotStreamId = 0;
3896    for (size_t i = 0; i < request->num_output_buffers; i++) {
3897        const camera3_stream_buffer_t& output = request->output_buffers[i];
3898        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3899
3900        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3901            //Call function to store local copy of jpeg data for encode params.
3902            blob_request = 1;
3903            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3904        }
3905
3906        if (output.acquire_fence != -1) {
3907           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3908           close(output.acquire_fence);
3909           if (rc != OK) {
3910              LOGE("sync wait failed %d", rc);
3911              pthread_mutex_unlock(&mMutex);
3912              return rc;
3913           }
3914        }
3915
3916        streamsArray.stream_request[streamsArray.num_streams++].streamID =
3917            channel->getStreamID(channel->getStreamTypeMask());
3918
3919        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3920            isVidBufRequested = true;
3921        }
3922    }
3923
3924    if (blob_request) {
3925        KPI_ATRACE_INT("SNAPSHOT", 1);
3926    }
3927    if (blob_request && mRawDumpChannel) {
3928        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3929        streamsArray.stream_request[streamsArray.num_streams].streamID =
3930            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3931        streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
3932    }
3933
3934    if(request->input_buffer == NULL) {
3935        /* Parse the settings:
3936         * - For every request in NORMAL MODE
3937         * - For every request in HFR mode during preview only case
3938         * - For first request of every batch in HFR mode during video
3939         * recording. In batchmode the same settings except frame number is
3940         * repeated in each request of the batch.
3941         */
3942        if (!mBatchSize ||
3943           (mBatchSize && !isVidBufRequested) ||
3944           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3945            rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
3946            if (rc < 0) {
3947                LOGE("fail to set frame parameters");
3948                pthread_mutex_unlock(&mMutex);
3949                return rc;
3950            }
3951        }
3952        /* For batchMode HFR, setFrameParameters is not called for every
3953         * request. But only frame number of the latest request is parsed.
3954         * Keep track of first and last frame numbers in a batch so that
3955         * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
3957        if (mBatchSize) {
3958            if (!mToBeQueuedVidBufs) {
3959                //start of the batch
3960                mFirstFrameNumberInBatch = request->frame_number;
3961            }
3962            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3963                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3964                LOGE("Failed to set the frame number in the parameters");
3965                return BAD_VALUE;
3966            }
3967        }
3968        if (mNeedSensorRestart) {
3969            /* Unlock the mutex as restartSensor waits on the channels to be
3970             * stopped, which in turn calls stream callback functions -
3971             * handleBufferWithLock and handleMetadataWithLock */
3972            pthread_mutex_unlock(&mMutex);
3973            rc = dynamicUpdateMetaStreamInfo();
3974            if (rc != NO_ERROR) {
3975                LOGE("Restarting the sensor failed");
3976                return BAD_VALUE;
3977            }
3978            mNeedSensorRestart = false;
3979            pthread_mutex_lock(&mMutex);
3980        }
3981    } else {
3982
3983        if (request->input_buffer->acquire_fence != -1) {
3984           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3985           close(request->input_buffer->acquire_fence);
3986           if (rc != OK) {
3987              LOGE("input buffer sync wait failed %d", rc);
3988              pthread_mutex_unlock(&mMutex);
3989              return rc;
3990           }
3991        }
3992    }
3993
3994    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3995        mLastCustIntentFrmNum = frameNumber;
3996    }
3997    /* Update pending request list and pending buffers map */
3998    PendingRequestInfo pendingRequest;
3999    pendingRequestIterator latestRequest;
4000    pendingRequest.frame_number = frameNumber;
4001    pendingRequest.num_buffers = request->num_output_buffers;
4002    pendingRequest.request_id = request_id;
4003    pendingRequest.blob_request = blob_request;
4004    pendingRequest.timestamp = 0;
4005    pendingRequest.bUrgentReceived = 0;
4006    if (request->input_buffer) {
4007        pendingRequest.input_buffer =
4008                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4009        *(pendingRequest.input_buffer) = *(request->input_buffer);
4010        pInputBuffer = pendingRequest.input_buffer;
4011    } else {
4012       pendingRequest.input_buffer = NULL;
4013       pInputBuffer = NULL;
4014    }
4015
4016    pendingRequest.pipeline_depth = 0;
4017    pendingRequest.partial_result_cnt = 0;
4018    extractJpegMetadata(mCurJpegMeta, request);
4019    pendingRequest.jpegMetadata = mCurJpegMeta;
4020    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4021    pendingRequest.shutter_notified = false;
4022
4023    //extract capture intent
4024    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4025        mCaptureIntent =
4026                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4027    }
4028    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4029        mHybridAeEnable =
4030                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4031    }
4032    pendingRequest.capture_intent = mCaptureIntent;
4033    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4034    /* DevCamDebug metadata processCaptureRequest */
4035    if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4036        mDevCamDebugMetaEnable =
4037                meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4038    }
4039    pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4040    /* DevCamDebug metadata end */
4041
4042    //extract CAC info
4043    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4044        mCacMode =
4045                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4046    }
4047    pendingRequest.fwkCacMode = mCacMode;
4048
4049    PendingBuffersInRequest bufsForCurRequest;
4050    bufsForCurRequest.frame_number = frameNumber;
4051    // Mark current timestamp for the new request
4052    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4053
4054    for (size_t i = 0; i < request->num_output_buffers; i++) {
4055        RequestedBufferInfo requestedBuf;
4056        memset(&requestedBuf, 0, sizeof(requestedBuf));
4057        requestedBuf.stream = request->output_buffers[i].stream;
4058        requestedBuf.buffer = NULL;
4059        pendingRequest.buffers.push_back(requestedBuf);
4060
4061        // Add to buffer handle the pending buffers list
4062        PendingBufferInfo bufferInfo;
4063        bufferInfo.buffer = request->output_buffers[i].buffer;
4064        bufferInfo.stream = request->output_buffers[i].stream;
4065        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4066        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4067        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4068            frameNumber, bufferInfo.buffer,
4069            channel->getStreamTypeMask(), bufferInfo.stream->format);
4070    }
4071    // Add this request packet into mPendingBuffersMap
4072    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4073    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4074        mPendingBuffersMap.get_num_overall_buffers());
4075
4076    latestRequest = mPendingRequestsList.insert(
4077            mPendingRequestsList.end(), pendingRequest);
4078    if(mFlush) {
4079        LOGI("mFlush is true");
4080        pthread_mutex_unlock(&mMutex);
4081        return NO_ERROR;
4082    }
4083
4084    int indexUsed;
4085    // Notify metadata channel we receive a request
4086    mMetadataChannel->request(NULL, frameNumber, indexUsed);
4087
4088    if(request->input_buffer != NULL){
4089        LOGD("Input request, frame_number %d", frameNumber);
4090        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4091        if (NO_ERROR != rc) {
4092            LOGE("fail to set reproc parameters");
4093            pthread_mutex_unlock(&mMutex);
4094            return rc;
4095        }
4096    }
4097
4098    // Call request on other streams
4099    uint32_t streams_need_metadata = 0;
4100    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4101    for (size_t i = 0; i < request->num_output_buffers; i++) {
4102        const camera3_stream_buffer_t& output = request->output_buffers[i];
4103        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4104
4105        if (channel == NULL) {
4106            LOGW("invalid channel pointer for stream");
4107            continue;
4108        }
4109
4110        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4111            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4112                      output.buffer, request->input_buffer, frameNumber);
4113            if(request->input_buffer != NULL){
4114                rc = channel->request(output.buffer, frameNumber,
4115                        pInputBuffer, &mReprocMeta, indexUsed);
4116                if (rc < 0) {
4117                    LOGE("Fail to request on picture channel");
4118                    pthread_mutex_unlock(&mMutex);
4119                    return rc;
4120                }
4121            } else {
4122                LOGD("snapshot request with buffer %p, frame_number %d",
4123                         output.buffer, frameNumber);
4124                if (!request->settings) {
4125                    rc = channel->request(output.buffer, frameNumber,
4126                            NULL, mPrevParameters, indexUsed);
4127                } else {
4128                    rc = channel->request(output.buffer, frameNumber,
4129                            NULL, mParameters, indexUsed);
4130                }
4131                if (rc < 0) {
4132                    LOGE("Fail to request on picture channel");
4133                    pthread_mutex_unlock(&mMutex);
4134                    return rc;
4135                }
4136
4137                uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4138                uint32_t j = 0;
4139                for (j = 0; j < streamsArray.num_streams; j++) {
4140                    if (streamsArray.stream_request[j].streamID == streamId) {
4141                      if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4142                          streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4143                      else
4144                          streamsArray.stream_request[j].buf_index = indexUsed;
4145                        break;
4146                    }
4147                }
4148                if (j == streamsArray.num_streams) {
4149                    LOGE("Did not find matching stream to update index");
4150                    assert(0);
4151                }
4152
4153                pendingBufferIter->need_metadata = true;
4154                streams_need_metadata++;
4155            }
4156        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4157            bool needMetadata = false;
4158            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4159            rc = yuvChannel->request(output.buffer, frameNumber,
4160                    pInputBuffer,
4161                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4162            if (rc < 0) {
4163                LOGE("Fail to request on YUV channel");
4164                pthread_mutex_unlock(&mMutex);
4165                return rc;
4166            }
4167
4168            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4169            uint32_t j = 0;
4170            for (j = 0; j < streamsArray.num_streams; j++) {
4171                if (streamsArray.stream_request[j].streamID == streamId) {
4172                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4173                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4174                    else
4175                        streamsArray.stream_request[j].buf_index = indexUsed;
4176                    break;
4177                }
4178            }
4179            if (j == streamsArray.num_streams) {
4180                LOGE("Did not find matching stream to update index");
4181                assert(0);
4182            }
4183
4184            pendingBufferIter->need_metadata = needMetadata;
4185            if (needMetadata)
4186                streams_need_metadata += 1;
4187            LOGD("calling YUV channel request, need_metadata is %d",
4188                     needMetadata);
4189        } else {
4190            LOGD("request with buffer %p, frame_number %d",
4191                  output.buffer, frameNumber);
4192
4193            rc = channel->request(output.buffer, frameNumber, indexUsed);
4194
4195            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4196            uint32_t j = 0;
4197            for (j = 0; j < streamsArray.num_streams; j++) {
4198                if (streamsArray.stream_request[j].streamID == streamId) {
4199                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4200                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4201                    else
4202                        streamsArray.stream_request[j].buf_index = indexUsed;
4203                    break;
4204                }
4205            }
4206            if (j == streamsArray.num_streams) {
4207                LOGE("Did not find matching stream to update index");
4208                assert(0);
4209            }
4210
4211            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4212                    && mBatchSize) {
4213                mToBeQueuedVidBufs++;
4214                if (mToBeQueuedVidBufs == mBatchSize) {
4215                    channel->queueBatchBuf();
4216                }
4217            }
4218            if (rc < 0) {
4219                LOGE("request failed");
4220                pthread_mutex_unlock(&mMutex);
4221                return rc;
4222            }
4223        }
4224        pendingBufferIter++;
4225    }
4226
4227    //If 2 streams have need_metadata set to true, fail the request, unless
4228    //we copy/reference count the metadata buffer
4229    if (streams_need_metadata > 1) {
4230        LOGE("not supporting request in which two streams requires"
4231                " 2 HAL metadata for reprocessing");
4232        pthread_mutex_unlock(&mMutex);
4233        return -EINVAL;
4234    }
4235
4236    if (request->input_buffer == NULL) {
4237        /* Set the parameters to backend:
4238         * - For every request in NORMAL MODE
4239         * - For every request in HFR mode during preview only case
4240         * - Once every batch in HFR mode during video recording
4241         */
4242        if (!mBatchSize ||
4243           (mBatchSize && !isVidBufRequested) ||
4244           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4245            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4246                     mBatchSize, isVidBufRequested,
4247                    mToBeQueuedVidBufs);
4248
4249            if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4250                for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4251                    uint32_t m = 0;
4252                    for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4253                        if (streamsArray.stream_request[k].streamID ==
4254                                mBatchedStreamsArray.stream_request[m].streamID)
4255                            break;
4256                        }
4257                        if (m == mBatchedStreamsArray.num_streams) {
4258                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4259                                streamsArray.stream_request[k].streamID;
4260                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4261                                streamsArray.stream_request[k].buf_index;
4262                            mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4263                        }
4264                }
4265                streamsArray = mBatchedStreamsArray;
4266            }
4267            /* Update stream id of all the requested buffers */
4268            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4269                LOGE("Failed to set stream type mask in the parameters");
4270                return BAD_VALUE;
4271            }
4272
4273            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4274                    mParameters);
4275            if (rc < 0) {
4276                LOGE("set_parms failed");
4277            }
4278            /* reset to zero coz, the batch is queued */
4279            mToBeQueuedVidBufs = 0;
4280            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4281            memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4282        } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4283            for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4284                uint32_t m = 0;
4285                for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4286                    if (streamsArray.stream_request[k].streamID ==
4287                            mBatchedStreamsArray.stream_request[m].streamID)
4288                        break;
4289                }
4290                if (m == mBatchedStreamsArray.num_streams) {
4291                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4292                        streamsArray.stream_request[k].streamID;
4293                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4294                        streamsArray.stream_request[k].buf_index;
4295                    mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4296                }
4297            }
4298        }
4299        mPendingLiveRequest++;
4300    }
4301
4302    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4303
4304    mState = STARTED;
4305    // Added a timed condition wait
4306    struct timespec ts;
4307    uint8_t isValidTimeout = 1;
4308    rc = clock_gettime(CLOCK_REALTIME, &ts);
4309    if (rc < 0) {
4310      isValidTimeout = 0;
4311      LOGE("Error reading the real time clock!!");
4312    }
4313    else {
4314      // Make timeout as 5 sec for request to be honored
4315      ts.tv_sec += 5;
4316    }
4317    //Block on conditional variable
4318    while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4319            (mState != ERROR) && (mState != DEINIT)) {
4320        if (!isValidTimeout) {
4321            LOGD("Blocking on conditional wait");
4322            pthread_cond_wait(&mRequestCond, &mMutex);
4323        }
4324        else {
4325            LOGD("Blocking on timed conditional wait");
4326            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4327            if (rc == ETIMEDOUT) {
4328                rc = -ENODEV;
4329                LOGE("Unblocked on timeout!!!!");
4330                break;
4331            }
4332        }
4333        LOGD("Unblocked");
4334        if (mWokenUpByDaemon) {
4335            mWokenUpByDaemon = false;
4336            if (mPendingLiveRequest < mMaxInFlightRequests)
4337                break;
4338        }
4339    }
4340    pthread_mutex_unlock(&mMutex);
4341
4342    return rc;
4343}
4344
4345/*===========================================================================
4346 * FUNCTION   : dump
4347 *
 * DESCRIPTION: Dump camera HAL3 state (pending requests, pending buffers
 *              and the pending frame drop list) to a file descriptor.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump output to
 *
 * RETURN     : None
 *==========================================================================*/
4355void QCamera3HardwareInterface::dump(int fd)
4356{
4357    pthread_mutex_lock(&mMutex);
4358    dprintf(fd, "\n Camera HAL3 information Begin \n");
4359
4360    dprintf(fd, "\nNumber of pending requests: %zu \n",
4361        mPendingRequestsList.size());
4362    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4363    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4364    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4365    for(pendingRequestIterator i = mPendingRequestsList.begin();
4366            i != mPendingRequestsList.end(); i++) {
4367        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4368        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4369        i->input_buffer);
4370    }
4371    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4372                mPendingBuffersMap.get_num_overall_buffers());
4373    dprintf(fd, "-------+------------------\n");
4374    dprintf(fd, " Frame | Stream type mask \n");
4375    dprintf(fd, "-------+------------------\n");
4376    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4377        for(auto &j : req.mPendingBufferList) {
4378            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4379            dprintf(fd, " %5d | %11d \n",
4380                    req.frame_number, channel->getStreamTypeMask());
4381        }
4382    }
4383    dprintf(fd, "-------+------------------\n");
4384
4385    dprintf(fd, "\nPending frame drop list: %zu\n",
4386        mPendingFrameDropList.size());
4387    dprintf(fd, "-------+-----------\n");
4388    dprintf(fd, " Frame | Stream ID \n");
4389    dprintf(fd, "-------+-----------\n");
4390    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4391        i != mPendingFrameDropList.end(); i++) {
4392        dprintf(fd, " %5d | %9d \n",
4393            i->frame_number, i->stream_ID);
4394    }
4395    dprintf(fd, "-------+-----------\n");
4396
4397    dprintf(fd, "\n Camera HAL3 information End \n");
4398
4399    /* use dumpsys media.camera as trigger to send update debug level event */
4400    mUpdateDebugLevel = true;
4401    pthread_mutex_unlock(&mMutex);
4402    return;
4403}
4404
4405/*===========================================================================
4406 * FUNCTION   : flush
4407 *
4408 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4409 *              conditionally restarts channels
4410 *
4411 * PARAMETERS :
4412 *  @ restartChannels: re-start all channels
4413 *
4414 *
4415 * RETURN     :
4416 *          0 on success
4417 *          Error code on failure
4418 *==========================================================================*/
4419int QCamera3HardwareInterface::flush(bool restartChannels)
4420{
4421    KPI_ATRACE_CALL();
4422    int32_t rc = NO_ERROR;
4423
4424    LOGD("Unblocking Process Capture Request");
4425    pthread_mutex_lock(&mMutex);
4426    mFlush = true;
4427    pthread_mutex_unlock(&mMutex);
4428
4429    rc = stopAllChannels();
4430    // unlink of dualcam
4431    if (mIsDeviceLinked) {
4432        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4433        pthread_mutex_lock(&gCamLock);
4434
4435        if (mIsMainCamera == 1) {
4436            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4437            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4438            // related session id should be session id of linked session
4439            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4440        } else {
4441            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4442            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4443            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4444        }
4445        pthread_mutex_unlock(&gCamLock);
4446
4447        rc = mCameraHandle->ops->sync_related_sensors(
4448                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4449        if (rc < 0) {
4450            LOGE("Dualcam: Unlink failed, but still proceed to close");
4451        }
4452    }
4453
4454    if (rc < 0) {
4455        LOGE("stopAllChannels failed");
4456        return rc;
4457    }
4458    if (mChannelHandle) {
4459        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4460                mChannelHandle);
4461    }
4462
4463    // Reset bundle info
4464    rc = setBundleInfo();
4465    if (rc < 0) {
4466        LOGE("setBundleInfo failed %d", rc);
4467        return rc;
4468    }
4469
4470    // Mutex Lock
4471    pthread_mutex_lock(&mMutex);
4472
4473    // Unblock process_capture_request
4474    mPendingLiveRequest = 0;
4475    pthread_cond_signal(&mRequestCond);
4476
4477    rc = notifyErrorForPendingRequests();
4478    if (rc < 0) {
4479        LOGE("notifyErrorForPendingRequests failed");
4480        pthread_mutex_unlock(&mMutex);
4481        return rc;
4482    }
4483
4484    mFlush = false;
4485
4486    // Start the Streams/Channels
4487    if (restartChannels) {
4488        rc = startAllChannels();
4489        if (rc < 0) {
4490            LOGE("startAllChannels failed");
4491            pthread_mutex_unlock(&mMutex);
4492            return rc;
4493        }
4494    }
4495
4496    if (mChannelHandle) {
4497        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4498                    mChannelHandle);
4499        if (rc < 0) {
4500            LOGE("start_channel failed");
4501            pthread_mutex_unlock(&mMutex);
4502            return rc;
4503        }
4504    }
4505
4506    pthread_mutex_unlock(&mMutex);
4507
4508    return 0;
4509}
4510
4511/*===========================================================================
4512 * FUNCTION   : flushPerf
4513 *
4514 * DESCRIPTION: This is the performance optimization version of flush that does
4515 *              not use stream off, rather flushes the system
4516 *
4517 * PARAMETERS :
4518 *
4519 *
4520 * RETURN     : 0 : success
4521 *              -EINVAL: input is malformed (device is not valid)
4522 *              -ENODEV: if the device has encountered a serious error
4523 *==========================================================================*/
4524int QCamera3HardwareInterface::flushPerf()
4525{
4526    ATRACE_CALL();
4527    int32_t rc = 0;
4528    struct timespec timeout;
4529    bool timed_wait = false;
4530
4531    pthread_mutex_lock(&mMutex);
4532    mFlushPerf = true;
4533    mPendingBuffersMap.numPendingBufsAtFlush =
4534        mPendingBuffersMap.get_num_overall_buffers();
4535    LOGD("Calling flush. Wait for %d buffers to return",
4536        mPendingBuffersMap.numPendingBufsAtFlush);
4537
4538    /* send the flush event to the backend */
4539    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
4540    if (rc < 0) {
4541        LOGE("Error in flush: IOCTL failure");
4542        mFlushPerf = false;
4543        pthread_mutex_unlock(&mMutex);
4544        return -ENODEV;
4545    }
4546
4547    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
4548        LOGD("No pending buffers in HAL, return flush");
4549        mFlushPerf = false;
4550        pthread_mutex_unlock(&mMutex);
4551        return rc;
4552    }
4553
4554    /* wait on a signal that buffers were received */
4555    rc = clock_gettime(CLOCK_REALTIME, &timeout);
4556    if (rc < 0) {
4557        LOGE("Error reading the real time clock, cannot use timed wait");
4558    } else {
4559        timeout.tv_sec += FLUSH_TIMEOUT;
4560        timed_wait = true;
4561    }
4562
4563    //Block on conditional variable
4564    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
4565        LOGD("Waiting on mBuffersCond");
4566        if (!timed_wait) {
4567            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
4568            if (rc != 0) {
4569                 LOGE("pthread_cond_wait failed due to rc = %s",
4570                        strerror(rc));
4571                 break;
4572            }
4573        } else {
4574            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
4575            if (rc != 0) {
4576                LOGE("pthread_cond_timedwait failed due to rc = %s",
4577                            strerror(rc));
4578                break;
4579            }
4580        }
4581    }
4582    if (rc != 0) {
4583        mFlushPerf = false;
4584        pthread_mutex_unlock(&mMutex);
4585        return -ENODEV;
4586    }
4587
4588    LOGD("Received buffers, now safe to return them");
4589
4590    //make sure the channels handle flush
4591    //currently only required for the picture channel to release snapshot resources
4592    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4593            it != mStreamInfo.end(); it++) {
4594        QCamera3Channel *channel = (*it)->channel;
4595        if (channel) {
4596            rc = channel->flush();
4597            if (rc) {
4598               LOGE("Flushing the channels failed with error %d", rc);
4599               // even though the channel flush failed we need to continue and
4600               // return the buffers we have to the framework, however the return
4601               // value will be an error
4602               rc = -ENODEV;
4603            }
4604        }
4605    }
4606
4607    /* notify the frameworks and send errored results */
4608    rc = notifyErrorForPendingRequests();
4609    if (rc < 0) {
4610        LOGE("notifyErrorForPendingRequests failed");
4611        pthread_mutex_unlock(&mMutex);
4612        return rc;
4613    }
4614
4615    //unblock process_capture_request
4616    mPendingLiveRequest = 0;
4617    unblockRequestIfNecessary();
4618
4619    mFlushPerf = false;
4620    pthread_mutex_unlock(&mMutex);
4621    LOGD ("Flush Operation complete. rc = %d", rc);
4622    return rc;
4623}
4624
4625/*===========================================================================
4626 * FUNCTION   : handleCameraDeviceError
4627 *
4628 * DESCRIPTION: This function calls internal flush and notifies the error to
4629 *              framework and updates the state variable.
4630 *
4631 * PARAMETERS : None
4632 *
4633 * RETURN     : NO_ERROR on Success
4634 *              Error code on failure
4635 *==========================================================================*/
4636int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4637{
4638    int32_t rc = NO_ERROR;
4639
4640    pthread_mutex_lock(&mMutex);
4641    if (mState != ERROR) {
4642        //if mState != ERROR, nothing to be done
4643        pthread_mutex_unlock(&mMutex);
4644        return NO_ERROR;
4645    }
4646    pthread_mutex_unlock(&mMutex);
4647
4648    rc = flush(false /* restart channels */);
4649    if (NO_ERROR != rc) {
4650        LOGE("internal flush to handle mState = ERROR failed");
4651    }
4652
4653    pthread_mutex_lock(&mMutex);
4654    mState = DEINIT;
4655    pthread_mutex_unlock(&mMutex);
4656
4657    camera3_notify_msg_t notify_msg;
4658    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4659    notify_msg.type = CAMERA3_MSG_ERROR;
4660    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4661    notify_msg.message.error.error_stream = NULL;
4662    notify_msg.message.error.frame_number = 0;
4663    mCallbackOps->notify(mCallbackOps, &notify_msg);
4664
4665    return rc;
4666}
4667
4668/*===========================================================================
4669 * FUNCTION   : captureResultCb
4670 *
4671 * DESCRIPTION: Callback handler for all capture result
4672 *              (streams, as well as metadata)
4673 *
4674 * PARAMETERS :
4675 *   @metadata : metadata information
4676 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4677 *               NULL if metadata.
4678 *
4679 * RETURN     : NONE
4680 *==========================================================================*/
4681void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4682                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4683{
4684    if (metadata_buf) {
4685        if (mBatchSize) {
4686            handleBatchMetadata(metadata_buf,
4687                    true /* free_and_bufdone_meta_buf */);
4688        } else { /* mBatchSize = 0 */
4689            hdrPlusPerfLock(metadata_buf);
4690            pthread_mutex_lock(&mMutex);
4691            handleMetadataWithLock(metadata_buf,
4692                    true /* free_and_bufdone_meta_buf */,
4693                    false /* first frame of batch metadata */ );
4694            pthread_mutex_unlock(&mMutex);
4695        }
4696    } else if (isInputBuffer) {
4697        pthread_mutex_lock(&mMutex);
4698        handleInputBufferWithLock(frame_number);
4699        pthread_mutex_unlock(&mMutex);
4700    } else {
4701        pthread_mutex_lock(&mMutex);
4702        handleBufferWithLock(buffer, frame_number);
4703        pthread_mutex_unlock(&mMutex);
4704    }
4705    return;
4706}
4707
4708/*===========================================================================
4709 * FUNCTION   : getReprocessibleOutputStreamId
4710 *
4711 * DESCRIPTION: Get source output stream id for the input reprocess stream
4712 *              based on size and format, which would be the largest
4713 *              output stream if an input stream exists.
4714 *
4715 * PARAMETERS :
4716 *   @id      : return the stream id if found
4717 *
4718 * RETURN     : int32_t type of status
4719 *              NO_ERROR  -- success
4720 *              none-zero failure code
4721 *==========================================================================*/
4722int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4723{
4724    /* check if any output or bidirectional stream with the same size and format
4725       and return that stream */
4726    if ((mInputStreamInfo.dim.width > 0) &&
4727            (mInputStreamInfo.dim.height > 0)) {
4728        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4729                it != mStreamInfo.end(); it++) {
4730
4731            camera3_stream_t *stream = (*it)->stream;
4732            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4733                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4734                    (stream->format == mInputStreamInfo.format)) {
4735                // Usage flag for an input stream and the source output stream
4736                // may be different.
4737                LOGD("Found reprocessible output stream! %p", *it);
4738                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4739                         stream->usage, mInputStreamInfo.usage);
4740
4741                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4742                if (channel != NULL && channel->mStreams[0]) {
4743                    id = channel->mStreams[0]->getMyServerID();
4744                    return NO_ERROR;
4745                }
4746            }
4747        }
4748    } else {
4749        LOGD("No input stream, so no reprocessible output stream");
4750    }
4751    return NAME_NOT_FOUND;
4752}
4753
4754/*===========================================================================
4755 * FUNCTION   : lookupFwkName
4756 *
4757 * DESCRIPTION: In case the enum is not same in fwk and backend
4758 *              make sure the parameter is correctly propogated
4759 *
4760 * PARAMETERS  :
4761 *   @arr      : map between the two enums
4762 *   @len      : len of the map
4763 *   @hal_name : name of the hal_parm to map
4764 *
4765 * RETURN     : int type of status
4766 *              fwk_name  -- success
4767 *              none-zero failure code
4768 *==========================================================================*/
4769template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4770        size_t len, halType hal_name)
4771{
4772
4773    for (size_t i = 0; i < len; i++) {
4774        if (arr[i].hal_name == hal_name) {
4775            return arr[i].fwk_name;
4776        }
4777    }
4778
4779    /* Not able to find matching framework type is not necessarily
4780     * an error case. This happens when mm-camera supports more attributes
4781     * than the frameworks do */
4782    LOGH("Cannot find matching framework type");
4783    return NAME_NOT_FOUND;
4784}
4785
4786/*===========================================================================
4787 * FUNCTION   : lookupHalName
4788 *
4789 * DESCRIPTION: In case the enum is not same in fwk and backend
4790 *              make sure the parameter is correctly propogated
4791 *
4792 * PARAMETERS  :
4793 *   @arr      : map between the two enums
4794 *   @len      : len of the map
4795 *   @fwk_name : name of the hal_parm to map
4796 *
4797 * RETURN     : int32_t type of status
4798 *              hal_name  -- success
4799 *              none-zero failure code
4800 *==========================================================================*/
4801template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4802        size_t len, fwkType fwk_name)
4803{
4804    for (size_t i = 0; i < len; i++) {
4805        if (arr[i].fwk_name == fwk_name) {
4806            return arr[i].hal_name;
4807        }
4808    }
4809
4810    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4811    return NAME_NOT_FOUND;
4812}
4813
4814/*===========================================================================
4815 * FUNCTION   : lookupProp
4816 *
4817 * DESCRIPTION: lookup a value by its name
4818 *
4819 * PARAMETERS :
4820 *   @arr     : map between the two enums
4821 *   @len     : size of the map
4822 *   @name    : name to be looked up
4823 *
4824 * RETURN     : Value if found
4825 *              CAM_CDS_MODE_MAX if not found
4826 *==========================================================================*/
4827template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4828        size_t len, const char *name)
4829{
4830    if (name) {
4831        for (size_t i = 0; i < len; i++) {
4832            if (!strcmp(arr[i].desc, name)) {
4833                return arr[i].val;
4834            }
4835        }
4836    }
4837    return CAM_CDS_MODE_MAX;
4838}
4839
4840/*===========================================================================
4841 *
4842 * DESCRIPTION:
4843 *
4844 * PARAMETERS :
4845 *   @metadata : metadata information from callback
4846 *   @timestamp: metadata buffer timestamp
4847 *   @request_id: request id
4848 *   @jpegMetadata: additional jpeg metadata
4849 *   @hybrid_ae_enable: whether hybrid ae is enabled
4850 *   // DevCamDebug metadata
4851 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
4852 *   // DevCamDebug metadata end
4853 *   @pprocDone: whether internal offline postprocsesing is done
4854 *
4855 * RETURN     : camera_metadata_t*
4856 *              metadata in a format specified by fwk
4857 *==========================================================================*/
4858camera_metadata_t*
4859QCamera3HardwareInterface::translateFromHalMetadata(
4860                                 metadata_buffer_t *metadata,
4861                                 nsecs_t timestamp,
4862                                 int32_t request_id,
4863                                 const CameraMetadata& jpegMetadata,
4864                                 uint8_t pipeline_depth,
4865                                 uint8_t capture_intent,
4866                                 uint8_t hybrid_ae_enable,
4867                                 /* DevCamDebug metadata translateFromHalMetadata argument */
4868                                 uint8_t DevCamDebug_meta_enable,
4869                                 /* DevCamDebug metadata end */
4870                                 bool pprocDone,
4871                                 uint8_t fwk_cacMode,
4872                                 bool firstMetadataInBatch)
4873{
4874    CameraMetadata camMetadata;
4875    camera_metadata_t *resultMetadata;
4876
4877    if (mBatchSize && !firstMetadataInBatch) {
4878        /* In batch mode, use cached metadata from the first metadata
4879            in the batch */
4880        camMetadata.clear();
4881        camMetadata = mCachedMetadata;
4882    }
4883
4884    if (jpegMetadata.entryCount())
4885        camMetadata.append(jpegMetadata);
4886
4887    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4888    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4889    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4890    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4891    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4892    if (mBatchSize == 0) {
4893        // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
4894        camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
4895    }
4896
4897    if (mBatchSize && !firstMetadataInBatch) {
4898        /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4899        resultMetadata = camMetadata.release();
4900        return resultMetadata;
4901    }
4902
4903    // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
4904    // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
4905    if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
4906        // DevCamDebug metadata translateFromHalMetadata AF
4907        IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
4908                CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
4909            int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
4910            camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
4911        }
4912        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
4913                CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
4914            int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
4915            camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
4916        }
4917        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
4918                CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
4919            int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
4920            camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
4921        }
4922        IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
4923                CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
4924            int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
4925            camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
4926        }
4927        IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
4928                CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
4929            int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
4930            camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
4931        }
4932        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
4933                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
4934            int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
4935                *DevCamDebug_af_monitor_pdaf_target_pos;
4936            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
4937                &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
4938        }
4939        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
4940                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
4941            int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
4942                *DevCamDebug_af_monitor_pdaf_confidence;
4943            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
4944                &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
4945        }
4946        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
4947                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
4948            int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
4949            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
4950                &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
4951        }
4952        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
4953                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
4954            int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
4955                *DevCamDebug_af_monitor_tof_target_pos;
4956            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
4957                &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
4958        }
4959        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
4960                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
4961            int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
4962                *DevCamDebug_af_monitor_tof_confidence;
4963            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
4964                &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
4965        }
4966        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
4967                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
4968            int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
4969            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
4970                &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
4971        }
4972        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
4973                CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
4974            int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
4975            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
4976                &fwk_DevCamDebug_af_monitor_type_select, 1);
4977        }
4978        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
4979                CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
4980            int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
4981            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
4982                &fwk_DevCamDebug_af_monitor_refocus, 1);
4983        }
4984        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
4985                CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
4986            int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
4987            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
4988                &fwk_DevCamDebug_af_monitor_target_pos, 1);
4989        }
4990        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
4991                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
4992            int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
4993                *DevCamDebug_af_search_pdaf_target_pos;
4994            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
4995                &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
4996        }
        // --- DevCamDebug vendor tags: AF search (PDAF / TOF / mixer) ---
        // Each IF_META_AVAILABLE below copies one HAL debug value out of
        // |metadata| into the matching DEVCAMDEBUG_* vendor tag on
        // |camMetadata| when (and only when) the HAL populated it.
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
                &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
                &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
                &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
                &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_target_pos =
                *DevCamDebug_af_search_tof_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
                &fwk_DevCamDebug_af_search_tof_target_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
                &fwk_DevCamDebug_af_search_tof_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
                &fwk_DevCamDebug_af_search_tof_near_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
                &fwk_DevCamDebug_af_search_tof_far_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
                &fwk_DevCamDebug_af_search_tof_confidence, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
            int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
                &fwk_DevCamDebug_af_search_type_select, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
                &fwk_DevCamDebug_af_search_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
                &fwk_DevCamDebug_af_search_target_pos, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AEC
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
                CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
    }
        // NOTE(review): the closing brace above is mis-indented (4 spaces
        // instead of 8) but braces are balanced; purely cosmetic.
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
                CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
                CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
                CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
                CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
            int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
            camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
                CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
            float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
            camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
                CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
            int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
            camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
                CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
            float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
            camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AWB
        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
        }
5137    }
5138    // atrace_end(ATRACE_TAG_ALWAYS);
5139
    // Frame number is widened from the HAL's uint32_t to the int64_t that
    // ANDROID_SYNC_FRAME_NUMBER expects.
    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
        int64_t fwk_frame_number = *frame_number;
        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
    }

    // FPS range: the HAL reports floats; the framework tag takes integers,
    // so fractional fps values are truncated here.
    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
        int32_t fps_range[2];
        fps_range[0] = (int32_t)float_range->min_fps;
        fps_range[1] = (int32_t)float_range->max_fps;
        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                      fps_range, 2);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
             fps_range[0], fps_range[1]);
    }

    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
    }
5158
5159    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5160        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5161                METADATA_MAP_SIZE(SCENE_MODES_MAP),
5162                *sceneMode);
5163        if (NAME_NOT_FOUND != val) {
5164            uint8_t fwkSceneMode = (uint8_t)val;
5165            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5166            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5167                     fwkSceneMode);
5168        }
5169    }
5170
    // AE/AWB lock and color-correction mode: HAL reports uint32_t, the
    // framework tags are single-byte enums, hence the narrowing casts.
    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
    }
5185
    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
            CAM_INTF_META_EDGE_MODE, metadata) {
        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
    }

    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
        uint8_t fwk_flashPower = (uint8_t) *flashPower;
        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
    }

    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
    }

    // Flash state: negative HAL values are treated as "no state to report".
    // Units without a flash always report UNAVAILABLE regardless of what the
    // HAL said.
    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_flashMode = (uint8_t)val;
            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
    }

    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
    }

    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    // Video stabilization: IF_META_AVAILABLE expands to an if, so this else
    // runs when the HAL did not report a mode; the result is then forced to
    // OFF because the framework (CTS) requires the tag to always be present.
    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("%s: EIS result default to OFF mode", __func__);
    }

    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }
5256
    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    // Applied dynamic black level: first published in RGGB order via the
    // vendor tag (after CFA reordering), then rescaled and published via the
    // standard Android tag.
    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
        // depth space.
        fwk_blackLevelInd[0] /= 4.0;
        fwk_blackLevelInd[1] /= 4.0;
        fwk_blackLevelInd[2] /= 4.0;
        fwk_blackLevelInd[3] /= 4.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
    }

    // Fixed whitelevel is used by ISP/Sensor
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);

    // Scaler crop region arrives in sensor-output coordinates and must be
    // remapped in place to active-array coordinates before publishing.
    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3]);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }
5306
    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    // Note: "sensorFameDuration" (sic) — local name keeps the original typo.
    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    // Sensitivity plus a derived per-channel noise profile: each color
    // channel gets the same (S, O) pair computed from the sensitivity.
    // NOTE(review): noise_profile is a variable-length array — a GCC/Clang
    // extension, not standard C++.
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }

    // Post-raw sensitivity boost: defaults to 100 (1.0x) when the HAL does
    // not report it; the post-stats factor multiplies the ISP value.
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);

    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }
5356
5357    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5358        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5359                *faceDetectMode);
5360        if (NAME_NOT_FOUND != val) {
5361            uint8_t fwk_faceDetectMode = (uint8_t)val;
5362            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5363
5364            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5365                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5366                        CAM_INTF_META_FACE_DETECTION, metadata) {
5367                    uint8_t numFaces = MIN(
5368                            faceDetectionInfo->num_faces_detected, MAX_ROI);
5369                    int32_t faceIds[MAX_ROI];
5370                    uint8_t faceScores[MAX_ROI];
5371                    int32_t faceRectangles[MAX_ROI * 4];
5372                    int32_t faceLandmarks[MAX_ROI * 6];
5373                    size_t j = 0, k = 0;
5374
5375                    for (size_t i = 0; i < numFaces; i++) {
5376                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5377                        // Adjust crop region from sensor output coordinate system to active
5378                        // array coordinate system.
5379                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5380                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
5381                                rect.width, rect.height);
5382
5383                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5384                                faceRectangles+j, -1);
5385
5386                        j+= 4;
5387                    }
5388                    if (numFaces <= 0) {
5389                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5390                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5391                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5392                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5393                    }
5394
5395                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5396                            numFaces);
5397                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5398                            faceRectangles, numFaces * 4U);
5399                    if (fwk_faceDetectMode ==
5400                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5401                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5402                                CAM_INTF_META_FACE_LANDMARK, metadata) {
5403
5404                            for (size_t i = 0; i < numFaces; i++) {
5405                                // Map the co-ordinate sensor output coordinate system to active
5406                                // array coordinate system.
5407                                mCropRegionMapper.toActiveArray(
5408                                        landmarks->face_landmarks[i].left_eye_center.x,
5409                                        landmarks->face_landmarks[i].left_eye_center.y);
5410                                mCropRegionMapper.toActiveArray(
5411                                        landmarks->face_landmarks[i].right_eye_center.x,
5412                                        landmarks->face_landmarks[i].right_eye_center.y);
5413                                mCropRegionMapper.toActiveArray(
5414                                        landmarks->face_landmarks[i].mouth_center.x,
5415                                        landmarks->face_landmarks[i].mouth_center.y);
5416
5417                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5418                                k+= 6;
5419                            }
5420                        }
5421
5422                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5423                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5424                                faceLandmarks, numFaces * 6U);
5425                   }
5426                }
5427            }
5428        }
5429    }
5430
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
    }

    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    // Sharpness map is published at its full fixed size (width x height x 3).
    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    // Lens shading map: dimensions come from the static capability, clamped
    // to the HAL's maximum map size; 4 gains (one per Bayer channel) per cell.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }

    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }
5462
    // Tonemap curves: each point is an (in, out) pair, hence count * 2
    // floats per channel. NOTE(review): the clamp below writes back into the
    // HAL metadata buffer (mutates |metadata|), not just a local copy.
    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAINS_COUNT);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    // Same clamp-in-place pattern as the RGB tonemap above.
    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }
5524
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    // Effect mode is table-mapped; unmapped HAL values are silently dropped.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }
5547
5548    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5549            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5550        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5551                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5552        if (NAME_NOT_FOUND != fwk_testPatternMode) {
5553            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5554        }
5555        int32_t fwk_testPatternData[4];
5556        fwk_testPatternData[0] = testPatternData->r;
5557        fwk_testPatternData[3] = testPatternData->b;
5558        switch (gCamCapability[mCameraId]->color_arrangement) {
5559        case CAM_FILTER_ARRANGEMENT_RGGB:
5560        case CAM_FILTER_ARRANGEMENT_GRBG:
5561            fwk_testPatternData[1] = testPatternData->gr;
5562            fwk_testPatternData[2] = testPatternData->gb;
5563            break;
5564        case CAM_FILTER_ARRANGEMENT_GBRG:
5565        case CAM_FILTER_ARRANGEMENT_BGGR:
5566            fwk_testPatternData[2] = testPatternData->gr;
5567            fwk_testPatternData[1] = testPatternData->gb;
5568            break;
5569        default:
5570            LOGE("color arrangement %d is not supported",
5571                gCamCapability[mCameraId]->color_arrangement);
5572            break;
5573        }
5574        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5575    }
5576
    // --- JPEG / EXIF related tags ---
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    // GPS processing method is a byte buffer; assumed NUL-terminated by the
    // HAL — TODO confirm, String8 relies on it here.
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
        String8 str((const char *)gps_methods);
        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    // Opaque HAL private data is passed through for reprocess, reinterpreted
    // as an int32_t array of the fixed payload size.
    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
                privateData,
                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
    }
5616
5617    if (metadata->is_tuning_params_valid) {
5618        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5619        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5620        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5621
5622
5623        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5624                sizeof(uint32_t));
5625        data += sizeof(uint32_t);
5626
5627        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5628                sizeof(uint32_t));
5629        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5630        data += sizeof(uint32_t);
5631
5632        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5633                sizeof(uint32_t));
5634        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5635        data += sizeof(uint32_t);
5636
5637        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5638                sizeof(uint32_t));
5639        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5640        data += sizeof(uint32_t);
5641
5642        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5643                sizeof(uint32_t));
5644        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5645        data += sizeof(uint32_t);
5646
5647        metadata->tuning_params.tuning_mod3_data_size = 0;
5648        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5649                sizeof(uint32_t));
5650        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5651        data += sizeof(uint32_t);
5652
5653        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5654                TUNING_SENSOR_DATA_MAX);
5655        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5656                count);
5657        data += count;
5658
5659        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5660                TUNING_VFE_DATA_MAX);
5661        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5662                count);
5663        data += count;
5664
5665        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5666                TUNING_CPP_DATA_MAX);
5667        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5668                count);
5669        data += count;
5670
5671        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5672                TUNING_CAC_DATA_MAX);
5673        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5674                count);
5675        data += count;
5676
5677        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5678                (int32_t *)(void *)tuning_meta_data_blob,
5679                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5680    }
5681
5682    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5683            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5684        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5685                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5686                NEUTRAL_COL_POINTS);
5687    }
5688
5689    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5690        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5691        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5692    }
5693
5694    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5695        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5696        // Adjust crop region from sensor output coordinate system to active
5697        // array coordinate system.
5698        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5699                hAeRegions->rect.width, hAeRegions->rect.height);
5700
5701        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5702        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5703                REGIONS_TUPLE_COUNT);
5704        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5705                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5706                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5707                hAeRegions->rect.height);
5708    }
5709
5710    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5711        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5712        if (NAME_NOT_FOUND != val) {
5713            uint8_t fwkAfMode = (uint8_t)val;
5714            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5715            LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5716        } else {
5717            LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5718                    val);
5719        }
5720    }
5721
5722    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5723        uint8_t fwk_afState = (uint8_t) *afState;
5724        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5725        LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5726    }
5727
5728    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5729        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5730    }
5731
5732    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5733        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5734    }
5735
5736    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5737        uint8_t fwk_lensState = *lensState;
5738        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5739    }
5740
5741    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5742        /*af regions*/
5743        int32_t afRegions[REGIONS_TUPLE_COUNT];
5744        // Adjust crop region from sensor output coordinate system to active
5745        // array coordinate system.
5746        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5747                hAfRegions->rect.width, hAfRegions->rect.height);
5748
5749        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5750        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5751                REGIONS_TUPLE_COUNT);
5752        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5753                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5754                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5755                hAfRegions->rect.height);
5756    }
5757
5758    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5759        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5760                *hal_ab_mode);
5761        if (NAME_NOT_FOUND != val) {
5762            uint8_t fwk_ab_mode = (uint8_t)val;
5763            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5764        }
5765    }
5766
5767    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5768        int val = lookupFwkName(SCENE_MODES_MAP,
5769                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5770        if (NAME_NOT_FOUND != val) {
5771            uint8_t fwkBestshotMode = (uint8_t)val;
5772            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5773            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5774        } else {
5775            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5776        }
5777    }
5778
5779    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5780         uint8_t fwk_mode = (uint8_t) *mode;
5781         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5782    }
5783
5784    /* Constant metadata values to be update*/
5785    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5786    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5787
5788    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5789    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5790
5791    int32_t hotPixelMap[2];
5792    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5793
5794    // CDS
5795    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5796        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5797    }
5798
5799    // TNR
5800    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5801        uint8_t tnr_enable       = tnr->denoise_enable;
5802        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5803
5804        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5805        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5806    }
5807
5808    // Reprocess crop data
5809    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5810        uint8_t cnt = crop_data->num_of_streams;
5811        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5812            // mm-qcamera-daemon only posts crop_data for streams
5813            // not linked to pproc. So no valid crop metadata is not
5814            // necessarily an error case.
5815            LOGD("No valid crop metadata entries");
5816        } else {
5817            uint32_t reproc_stream_id;
5818            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5819                LOGD("No reprocessible stream found, ignore crop data");
5820            } else {
5821                int rc = NO_ERROR;
5822                Vector<int32_t> roi_map;
5823                int32_t *crop = new int32_t[cnt*4];
5824                if (NULL == crop) {
5825                   rc = NO_MEMORY;
5826                }
5827                if (NO_ERROR == rc) {
5828                    int32_t streams_found = 0;
5829                    for (size_t i = 0; i < cnt; i++) {
5830                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5831                            if (pprocDone) {
5832                                // HAL already does internal reprocessing,
5833                                // either via reprocessing before JPEG encoding,
5834                                // or offline postprocessing for pproc bypass case.
5835                                crop[0] = 0;
5836                                crop[1] = 0;
5837                                crop[2] = mInputStreamInfo.dim.width;
5838                                crop[3] = mInputStreamInfo.dim.height;
5839                            } else {
5840                                crop[0] = crop_data->crop_info[i].crop.left;
5841                                crop[1] = crop_data->crop_info[i].crop.top;
5842                                crop[2] = crop_data->crop_info[i].crop.width;
5843                                crop[3] = crop_data->crop_info[i].crop.height;
5844                            }
5845                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5846                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5847                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5848                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5849                            streams_found++;
5850                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5851                                    crop[0], crop[1], crop[2], crop[3]);
5852                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5853                                    crop_data->crop_info[i].roi_map.left,
5854                                    crop_data->crop_info[i].roi_map.top,
5855                                    crop_data->crop_info[i].roi_map.width,
5856                                    crop_data->crop_info[i].roi_map.height);
5857                            break;
5858
5859                       }
5860                    }
5861                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5862                            &streams_found, 1);
5863                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5864                            crop, (size_t)(streams_found * 4));
5865                    if (roi_map.array()) {
5866                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5867                                roi_map.array(), roi_map.size());
5868                    }
5869               }
5870               if (crop) {
5871                   delete [] crop;
5872               }
5873            }
5874        }
5875    }
5876
5877    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5878        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5879        // so hardcoding the CAC result to OFF mode.
5880        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5881        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5882    } else {
5883        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5884            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5885                    *cacMode);
5886            if (NAME_NOT_FOUND != val) {
5887                uint8_t resultCacMode = (uint8_t)val;
5888                // check whether CAC result from CB is equal to Framework set CAC mode
5889                // If not equal then set the CAC mode came in corresponding request
5890                if (fwk_cacMode != resultCacMode) {
5891                    resultCacMode = fwk_cacMode;
5892                }
5893                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5894                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5895            } else {
5896                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5897            }
5898        }
5899    }
5900
5901    // Post blob of cam_cds_data through vendor tag.
5902    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5903        uint8_t cnt = cdsInfo->num_of_streams;
5904        cam_cds_data_t cdsDataOverride;
5905        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5906        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5907        cdsDataOverride.num_of_streams = 1;
5908        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5909            uint32_t reproc_stream_id;
5910            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5911                LOGD("No reprocessible stream found, ignore cds data");
5912            } else {
5913                for (size_t i = 0; i < cnt; i++) {
5914                    if (cdsInfo->cds_info[i].stream_id ==
5915                            reproc_stream_id) {
5916                        cdsDataOverride.cds_info[0].cds_enable =
5917                                cdsInfo->cds_info[i].cds_enable;
5918                        break;
5919                    }
5920                }
5921            }
5922        } else {
5923            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5924        }
5925        camMetadata.update(QCAMERA3_CDS_INFO,
5926                (uint8_t *)&cdsDataOverride,
5927                sizeof(cam_cds_data_t));
5928    }
5929
5930    // Ldaf calibration data
5931    if (!mLdafCalibExist) {
5932        IF_META_AVAILABLE(uint32_t, ldafCalib,
5933                CAM_INTF_META_LDAF_EXIF, metadata) {
5934            mLdafCalibExist = true;
5935            mLdafCalib[0] = ldafCalib[0];
5936            mLdafCalib[1] = ldafCalib[1];
5937            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5938                    ldafCalib[0], ldafCalib[1]);
5939        }
5940    }
5941
5942    // AF scene change
5943    IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
5944        camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
5945    }
5946
5947    /* In batch mode, cache the first metadata in the batch */
5948    if (mBatchSize && firstMetadataInBatch) {
5949        mCachedMetadata.clear();
5950        mCachedMetadata = camMetadata;
5951    }
5952
5953    resultMetadata = camMetadata.release();
5954    return resultMetadata;
5955}
5956
5957/*===========================================================================
5958 * FUNCTION   : saveExifParams
5959 *
 * DESCRIPTION: Cache the 3A/stats EXIF debug parameter blobs from the
 *              metadata callback into mExifParams for later use when
 *              composing JPEG EXIF data.
5961 *
5962 * PARAMETERS :
5963 *   @metadata : metadata information from callback
5964 *
5965 * RETURN     : none
5966 *
5967 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Cache each 3A/stats EXIF debug blob present in this metadata batch
    // into mExifParams.debug_params, setting the matching *_valid flag.
    // mExifParams.debug_params may not be allocated yet, hence the NULL
    // guard before every copy.

    // AE debug blob.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB debug blob.
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF debug blob.
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // Auto scene detection debug blob.
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // Stats buffer debug blob.
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // Bayer-grid (BE) stats debug blob.
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer-histogram debug blob.
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning info debug blob.
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
6027
6028/*===========================================================================
6029 * FUNCTION   : get3AExifParams
6030 *
 * DESCRIPTION: Return a copy of the EXIF parameters cached by
 *              saveExifParams().
6032 *
6033 * PARAMETERS : none
6034 *
6035 *
6036 * RETURN     : mm_jpeg_exif_params_t
6037 *
6038 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return a copy of the cached EXIF parameters populated by
    // saveExifParams(). Note: debug_params inside the struct is a pointer
    // (it is dereferenced elsewhere), so the copy shares that underlying
    // debug-params storage with the HAL instance.
    return mExifParams;
}
6043
6044/*===========================================================================
6045 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
6046 *
 * DESCRIPTION: Translate the urgent (partial) HAL metadata — 3A states,
 *              triggers and AE mode — into framework result metadata.
6048 *
6049 * PARAMETERS :
6050 *   @metadata : metadata information from callback
6051 *
6052 * RETURN     : camera_metadata_t*
6053 *              metadata in a format specified by fwk
6054 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: HAL reports uint32_t, framework tag expects a uint8_t enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are forwarded as-is.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state, narrowed to the framework's uint8_t enum.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and trigger id forwarded as-is.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: map the HAL value to the framework enum; skip the tag if
    // the lookup fails rather than publishing a bogus mode.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL values.
    // Defaults mark each as "not reported" so the priority chain below can
    // tell which inputs were actually present in this metadata batch.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Priority order: red-eye reduction > flash auto/on > plain AE on/off.
    // The chain order matters; do not reorder.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three inputs gave a usable answer; publish nothing.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Ownership of the backing camera_metadata_t transfers to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6149
6150/*===========================================================================
6151 * FUNCTION   : dumpMetadataToFile
6152 *
6153 * DESCRIPTION: Dumps tuning metadata to file system
6154 *
6155 * PARAMETERS :
6156 *   @meta           : tuning metadata
6157 *   @dumpFrameCount : current dump frame count
6158 *   @enabled        : Enable mask
6159 *
6160 *==========================================================================*/
6161void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6162                                                   uint32_t &dumpFrameCount,
6163                                                   bool enabled,
6164                                                   const char *type,
6165                                                   uint32_t frameNumber)
6166{
6167    //Some sanity checks
6168    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6169        LOGE("Tuning sensor data size bigger than expected %d: %d",
6170              meta.tuning_sensor_data_size,
6171              TUNING_SENSOR_DATA_MAX);
6172        return;
6173    }
6174
6175    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6176        LOGE("Tuning VFE data size bigger than expected %d: %d",
6177              meta.tuning_vfe_data_size,
6178              TUNING_VFE_DATA_MAX);
6179        return;
6180    }
6181
6182    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6183        LOGE("Tuning CPP data size bigger than expected %d: %d",
6184              meta.tuning_cpp_data_size,
6185              TUNING_CPP_DATA_MAX);
6186        return;
6187    }
6188
6189    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6190        LOGE("Tuning CAC data size bigger than expected %d: %d",
6191              meta.tuning_cac_data_size,
6192              TUNING_CAC_DATA_MAX);
6193        return;
6194    }
6195    //
6196
6197    if(enabled){
6198        char timeBuf[FILENAME_MAX];
6199        char buf[FILENAME_MAX];
6200        memset(buf, 0, sizeof(buf));
6201        memset(timeBuf, 0, sizeof(timeBuf));
6202        time_t current_time;
6203        struct tm * timeinfo;
6204        time (&current_time);
6205        timeinfo = localtime (&current_time);
6206        if (timeinfo != NULL) {
6207            /* Consistent naming for Jpeg+meta+raw: meta name */
6208            strftime (timeBuf, sizeof(timeBuf),
6209                    QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6210            /* Consistent naming for Jpeg+meta+raw: meta name end*/
6211        }
6212        String8 filePath(timeBuf);
6213         /* Consistent naming for Jpeg+meta+raw */
6214        snprintf(buf,
6215                sizeof(buf),
6216                "%dm_%s_%d.bin",
6217                dumpFrameCount,
6218                type,
6219                frameNumber);
6220         /* Consistent naming for Jpeg+meta+raw end */
6221        filePath.append(buf);
6222        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6223        if (file_fd >= 0) {
6224            ssize_t written_len = 0;
6225            meta.tuning_data_version = TUNING_DATA_VERSION;
6226            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6227            written_len += write(file_fd, data, sizeof(uint32_t));
6228            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6229            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6230            written_len += write(file_fd, data, sizeof(uint32_t));
6231            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6232            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6233            written_len += write(file_fd, data, sizeof(uint32_t));
6234            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6235            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6236            written_len += write(file_fd, data, sizeof(uint32_t));
6237            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6238            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6239            written_len += write(file_fd, data, sizeof(uint32_t));
6240            meta.tuning_mod3_data_size = 0;
6241            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6242            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6243            written_len += write(file_fd, data, sizeof(uint32_t));
6244            size_t total_size = meta.tuning_sensor_data_size;
6245            data = (void *)((uint8_t *)&meta.data);
6246            written_len += write(file_fd, data, total_size);
6247            total_size = meta.tuning_vfe_data_size;
6248            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6249            written_len += write(file_fd, data, total_size);
6250            total_size = meta.tuning_cpp_data_size;
6251            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6252            written_len += write(file_fd, data, total_size);
6253            total_size = meta.tuning_cac_data_size;
6254            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6255            written_len += write(file_fd, data, total_size);
6256            close(file_fd);
6257        }else {
6258            LOGE("fail to open file for metadata dumping");
6259        }
6260    }
6261}
6262
6263/*===========================================================================
6264 * FUNCTION   : cleanAndSortStreamInfo
6265 *
6266 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6267 *              and sort them such that raw stream is at the end of the list
6268 *              This is a workaround for camera daemon constraint.
6269 *
6270 * PARAMETERS : None
6271 *
6272 *==========================================================================*/
6273void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6274{
6275    List<stream_info_t *> newStreamInfo;
6276
6277    /*clean up invalid streams*/
6278    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6279            it != mStreamInfo.end();) {
6280        if(((*it)->status) == INVALID){
6281            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6282            delete channel;
6283            free(*it);
6284            it = mStreamInfo.erase(it);
6285        } else {
6286            it++;
6287        }
6288    }
6289
6290    // Move preview/video/callback/snapshot streams into newList
6291    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6292            it != mStreamInfo.end();) {
6293        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6294                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6295                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6296            newStreamInfo.push_back(*it);
6297            it = mStreamInfo.erase(it);
6298        } else
6299            it++;
6300    }
6301    // Move raw streams into newList
6302    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6303            it != mStreamInfo.end();) {
6304        newStreamInfo.push_back(*it);
6305        it = mStreamInfo.erase(it);
6306    }
6307
6308    mStreamInfo = newStreamInfo;
6309}
6310
6311/*===========================================================================
6312 * FUNCTION   : extractJpegMetadata
6313 *
6314 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6315 *              JPEG metadata is cached in HAL, and return as part of capture
6316 *              result when metadata is returned from camera daemon.
6317 *
6318 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6319 *              @request:      capture request
6320 *
6321 *==========================================================================*/
6322void QCamera3HardwareInterface::extractJpegMetadata(
6323        CameraMetadata& jpegMetadata,
6324        const camera3_capture_request_t *request)
6325{
6326    CameraMetadata frame_settings;
6327    frame_settings = request->settings;
6328
6329    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6330        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6331                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6332                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6333
6334    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6335        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6336                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6337                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6338
6339    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6340        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6341                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6342                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6343
6344    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6345        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6346                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6347                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6348
6349    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6350        jpegMetadata.update(ANDROID_JPEG_QUALITY,
6351                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6352                frame_settings.find(ANDROID_JPEG_QUALITY).count);
6353
6354    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6355        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6356                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6357                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6358
6359    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6360        int32_t thumbnail_size[2];
6361        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6362        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6363        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6364            int32_t orientation =
6365                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6366            if ((orientation == 90) || (orientation == 270)) {
6367               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6368               int32_t temp;
6369               temp = thumbnail_size[0];
6370               thumbnail_size[0] = thumbnail_size[1];
6371               thumbnail_size[1] = temp;
6372            }
6373         }
6374         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6375                thumbnail_size,
6376                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6377    }
6378
6379}
6380
6381/*===========================================================================
6382 * FUNCTION   : convertToRegions
6383 *
6384 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6385 *
6386 * PARAMETERS :
6387 *   @rect   : cam_rect_t struct to convert
6388 *   @region : int32_t destination array
6389 *   @weight : if we are converting from cam_area_t, weight is valid
6390 *             else weight = -1
6391 *
6392 *==========================================================================*/
6393void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6394        int32_t *region, int weight)
6395{
6396    region[0] = rect.left;
6397    region[1] = rect.top;
6398    region[2] = rect.left + rect.width;
6399    region[3] = rect.top + rect.height;
6400    if (weight > -1) {
6401        region[4] = weight;
6402    }
6403}
6404
6405/*===========================================================================
6406 * FUNCTION   : convertFromRegions
6407 *
 * DESCRIPTION: helper method to populate a cam_area_t ROI from a 5-element
 *              int32 metadata entry laid out as {xmin, ymin, xmax, ymax, weight}
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill in
 *   @settings : capture request settings containing the region tag
 *   @tag      : metadata tag identifying the region entry to read
6415 *
6416 *==========================================================================*/
6417void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6418        const camera_metadata_t *settings, uint32_t tag)
6419{
6420    CameraMetadata frame_settings;
6421    frame_settings = settings;
6422    int32_t x_min = frame_settings.find(tag).data.i32[0];
6423    int32_t y_min = frame_settings.find(tag).data.i32[1];
6424    int32_t x_max = frame_settings.find(tag).data.i32[2];
6425    int32_t y_max = frame_settings.find(tag).data.i32[3];
6426    roi.weight = frame_settings.find(tag).data.i32[4];
6427    roi.rect.left = x_min;
6428    roi.rect.top = y_min;
6429    roi.rect.width = x_max - x_min;
6430    roi.rect.height = y_max - y_min;
6431}
6432
6433/*===========================================================================
6434 * FUNCTION   : resetIfNeededROI
6435 *
6436 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6437 *              crop region
6438 *
6439 * PARAMETERS :
6440 *   @roi       : cam_area_t struct to resize
6441 *   @scalerCropRegion : cam_crop_region_t region to compare against
6442 *
6443 *
6444 *==========================================================================*/
6445bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6446                                                 const cam_crop_region_t* scalerCropRegion)
6447{
6448    int32_t roi_x_max = roi->rect.width + roi->rect.left;
6449    int32_t roi_y_max = roi->rect.height + roi->rect.top;
6450    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6451    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6452
6453    /* According to spec weight = 0 is used to indicate roi needs to be disabled
6454     * without having this check the calculations below to validate if the roi
6455     * is inside scalar crop region will fail resulting in the roi not being
6456     * reset causing algorithm to continue to use stale roi window
6457     */
6458    if (roi->weight == 0) {
6459        return true;
6460    }
6461
6462    if ((roi_x_max < scalerCropRegion->left) ||
6463        // right edge of roi window is left of scalar crop's left edge
6464        (roi_y_max < scalerCropRegion->top)  ||
6465        // bottom edge of roi window is above scalar crop's top edge
6466        (roi->rect.left > crop_x_max) ||
6467        // left edge of roi window is beyond(right) of scalar crop's right edge
6468        (roi->rect.top > crop_y_max)){
6469        // top edge of roi windo is above scalar crop's top edge
6470        return false;
6471    }
6472    if (roi->rect.left < scalerCropRegion->left) {
6473        roi->rect.left = scalerCropRegion->left;
6474    }
6475    if (roi->rect.top < scalerCropRegion->top) {
6476        roi->rect.top = scalerCropRegion->top;
6477    }
6478    if (roi_x_max > crop_x_max) {
6479        roi_x_max = crop_x_max;
6480    }
6481    if (roi_y_max > crop_y_max) {
6482        roi_y_max = crop_y_max;
6483    }
6484    roi->rect.width = roi_x_max - roi->rect.left;
6485    roi->rect.height = roi_y_max - roi->rect.top;
6486    return true;
6487}
6488
6489/*===========================================================================
6490 * FUNCTION   : convertLandmarks
6491 *
6492 * DESCRIPTION: helper method to extract the landmarks from face detection info
6493 *
6494 * PARAMETERS :
6495 *   @landmark_data : input landmark data to be converted
6496 *   @landmarks : int32_t destination array
6497 *
6498 *
6499 *==========================================================================*/
6500void QCamera3HardwareInterface::convertLandmarks(
6501        cam_face_landmarks_info_t landmark_data,
6502        int32_t *landmarks)
6503{
6504    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6505    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6506    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6507    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6508    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6509    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6510}
6511
6512#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6513/*===========================================================================
6514 * FUNCTION   : initCapabilities
6515 *
6516 * DESCRIPTION: initialize camera capabilities in static data struct
6517 *
6518 * PARAMETERS :
6519 *   @cameraId  : camera Id
6520 *
6521 * RETURN     : int32_t type of status
6522 *              NO_ERROR  -- success
6523 *              none-zero failure code
6524 *==========================================================================*/
6525int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6526{
6527    int rc = 0;
6528    mm_camera_vtbl_t *cameraHandle = NULL;
6529    QCamera3HeapMemory *capabilityHeap = NULL;
6530
6531    rc = camera_open((uint8_t)cameraId, &cameraHandle);
6532    if (rc) {
6533        LOGE("camera_open failed. rc = %d", rc);
6534        goto open_failed;
6535    }
6536    if (!cameraHandle) {
6537        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6538        goto open_failed;
6539    }
6540
6541    capabilityHeap = new QCamera3HeapMemory(1);
6542    if (capabilityHeap == NULL) {
6543        LOGE("creation of capabilityHeap failed");
6544        goto heap_creation_failed;
6545    }
6546    /* Allocate memory for capability buffer */
6547    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6548    if(rc != OK) {
6549        LOGE("No memory for cappability");
6550        goto allocate_failed;
6551    }
6552
6553    /* Map memory for capability buffer */
6554    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6555    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6556                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
6557                                capabilityHeap->getFd(0),
6558                                sizeof(cam_capability_t),
6559                                capabilityHeap->getPtr(0));
6560    if(rc < 0) {
6561        LOGE("failed to map capability buffer");
6562        goto map_failed;
6563    }
6564
6565    /* Query Capability */
6566    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6567    if(rc < 0) {
6568        LOGE("failed to query capability");
6569        goto query_failed;
6570    }
6571    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6572    if (!gCamCapability[cameraId]) {
6573        LOGE("out of memory");
6574        goto query_failed;
6575    }
6576    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6577                                        sizeof(cam_capability_t));
6578
6579    int index;
6580    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6581        cam_analysis_info_t *p_analysis_info =
6582                &gCamCapability[cameraId]->analysis_info[index];
6583        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6584        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6585    }
6586    rc = 0;
6587
6588query_failed:
6589    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6590                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
6591map_failed:
6592    capabilityHeap->deallocate();
6593allocate_failed:
6594    delete capabilityHeap;
6595heap_creation_failed:
6596    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6597    cameraHandle = NULL;
6598open_failed:
6599    return rc;
6600}
6601
6602/*==========================================================================
 * FUNCTION   : get3AVersion
6604 *
6605 * DESCRIPTION: get the Q3A S/W version
6606 *
6607 * PARAMETERS :
6608 *  @sw_version: Reference of Q3A structure which will hold version info upon
6609 *               return
6610 *
6611 * RETURN     : None
6612 *
6613 *==========================================================================*/
6614void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6615{
6616    if(gCamCapability[mCameraId])
6617        sw_version = gCamCapability[mCameraId]->q3a_version;
6618    else
6619        LOGE("Capability structure NULL!");
6620}
6621
6622
6623/*===========================================================================
6624 * FUNCTION   : initParameters
6625 *
6626 * DESCRIPTION: initialize camera parameters
6627 *
6628 * PARAMETERS :
6629 *
6630 * RETURN     : int32_t type of status
6631 *              NO_ERROR  -- success
6632 *              none-zero failure code
6633 *==========================================================================*/
6634int QCamera3HardwareInterface::initParameters()
6635{
6636    int rc = 0;
6637
6638    //Allocate Set Param Buffer
6639    mParamHeap = new QCamera3HeapMemory(1);
6640    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6641    if(rc != OK) {
6642        rc = NO_MEMORY;
6643        LOGE("Failed to allocate SETPARM Heap memory");
6644        delete mParamHeap;
6645        mParamHeap = NULL;
6646        return rc;
6647    }
6648
6649    //Map memory for parameters buffer
6650    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6651            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6652            mParamHeap->getFd(0),
6653            sizeof(metadata_buffer_t),
6654            (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6655    if(rc < 0) {
6656        LOGE("failed to map SETPARM buffer");
6657        rc = FAILED_TRANSACTION;
6658        mParamHeap->deallocate();
6659        delete mParamHeap;
6660        mParamHeap = NULL;
6661        return rc;
6662    }
6663
6664    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6665
6666    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6667    return rc;
6668}
6669
6670/*===========================================================================
6671 * FUNCTION   : deinitParameters
6672 *
6673 * DESCRIPTION: de-initialize camera parameters
6674 *
6675 * PARAMETERS :
6676 *
6677 * RETURN     : NONE
6678 *==========================================================================*/
6679void QCamera3HardwareInterface::deinitParameters()
6680{
6681    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6682            CAM_MAPPING_BUF_TYPE_PARM_BUF);
6683
6684    mParamHeap->deallocate();
6685    delete mParamHeap;
6686    mParamHeap = NULL;
6687
6688    mParameters = NULL;
6689
6690    free(mPrevParameters);
6691    mPrevParameters = NULL;
6692}
6693
6694/*===========================================================================
6695 * FUNCTION   : calcMaxJpegSize
6696 *
6697 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6698 *
6699 * PARAMETERS :
6700 *
6701 * RETURN     : max_jpeg_size
6702 *==========================================================================*/
6703size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6704{
6705    size_t max_jpeg_size = 0;
6706    size_t temp_width, temp_height;
6707    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6708            MAX_SIZES_CNT);
6709    for (size_t i = 0; i < count; i++) {
6710        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6711        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6712        if (temp_width * temp_height > max_jpeg_size ) {
6713            max_jpeg_size = temp_width * temp_height;
6714        }
6715    }
6716    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6717    return max_jpeg_size;
6718}
6719
6720/*===========================================================================
6721 * FUNCTION   : getMaxRawSize
6722 *
6723 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6724 *
6725 * PARAMETERS :
6726 *
6727 * RETURN     : Largest supported Raw Dimension
6728 *==========================================================================*/
6729cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6730{
6731    int max_width = 0;
6732    cam_dimension_t maxRawSize;
6733
6734    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6735    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6736        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6737            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6738            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6739        }
6740    }
6741    return maxRawSize;
6742}
6743
6744
6745/*===========================================================================
6746 * FUNCTION   : calcMaxJpegDim
6747 *
6748 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6749 *
6750 * PARAMETERS :
6751 *
6752 * RETURN     : max_jpeg_dim
6753 *==========================================================================*/
6754cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6755{
6756    cam_dimension_t max_jpeg_dim;
6757    cam_dimension_t curr_jpeg_dim;
6758    max_jpeg_dim.width = 0;
6759    max_jpeg_dim.height = 0;
6760    curr_jpeg_dim.width = 0;
6761    curr_jpeg_dim.height = 0;
6762    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6763        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6764        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6765        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6766            max_jpeg_dim.width * max_jpeg_dim.height ) {
6767            max_jpeg_dim.width = curr_jpeg_dim.width;
6768            max_jpeg_dim.height = curr_jpeg_dim.height;
6769        }
6770    }
6771    return max_jpeg_dim;
6772}
6773
6774/*===========================================================================
6775 * FUNCTION   : addStreamConfig
6776 *
6777 * DESCRIPTION: adds the stream configuration to the array
6778 *
6779 * PARAMETERS :
6780 * @available_stream_configs : pointer to stream configuration array
6781 * @scalar_format            : scalar format
6782 * @dim                      : configuration dimension
6783 * @config_type              : input or output configuration type
6784 *
6785 * RETURN     : NONE
6786 *==========================================================================*/
6787void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6788        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6789{
6790    available_stream_configs.add(scalar_format);
6791    available_stream_configs.add(dim.width);
6792    available_stream_configs.add(dim.height);
6793    available_stream_configs.add(config_type);
6794}
6795
6796/*===========================================================================
 * FUNCTION   : supportBurstCapture
6798 *
6799 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6800 *
6801 * PARAMETERS :
6802 *   @cameraId  : camera Id
6803 *
6804 * RETURN     : true if camera supports BURST_CAPTURE
6805 *              false otherwise
6806 *==========================================================================*/
6807bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6808{
6809    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6810    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6811    const int32_t highResWidth = 3264;
6812    const int32_t highResHeight = 2448;
6813
6814    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6815        // Maximum resolution images cannot be captured at >= 10fps
6816        // -> not supporting BURST_CAPTURE
6817        return false;
6818    }
6819
6820    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6821        // Maximum resolution images can be captured at >= 20fps
6822        // --> supporting BURST_CAPTURE
6823        return true;
6824    }
6825
6826    // Find the smallest highRes resolution, or largest resolution if there is none
6827    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6828            MAX_SIZES_CNT);
6829    size_t highRes = 0;
6830    while ((highRes + 1 < totalCnt) &&
6831            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6832            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6833            highResWidth * highResHeight)) {
6834        highRes++;
6835    }
6836    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6837        return true;
6838    } else {
6839        return false;
6840    }
6841}
6842
6843/*===========================================================================
6844 * FUNCTION   : initStaticMetadata
6845 *
6846 * DESCRIPTION: initialize the static metadata
6847 *
6848 * PARAMETERS :
6849 *   @cameraId  : camera Id
6850 *
6851 * RETURN     : int32_t type of status
6852 *              0  -- success
6853 *              non-zero failure code
6854 *==========================================================================*/
6855int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6856{
6857    int rc = 0;
6858    CameraMetadata staticInfo;
6859    size_t count = 0;
6860    bool limitedDevice = false;
6861    char prop[PROPERTY_VALUE_MAX];
6862    bool supportBurst = false;
6863
6864    supportBurst = supportBurstCapture(cameraId);
6865
6866    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6867     * guaranteed or if min fps of max resolution is less than 20 fps, its
6868     * advertised as limited device*/
6869    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6870            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6871            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6872            !supportBurst;
6873
6874    uint8_t supportedHwLvl = limitedDevice ?
6875            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6876            // LEVEL_3 - This device will support level 3.
6877            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6878
6879    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6880            &supportedHwLvl, 1);
6881
6882    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6883    /*HAL 3 only*/
6884    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6885                    &gCamCapability[cameraId]->min_focus_distance, 1);
6886
6887    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6888                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6889
6890    /*should be using focal lengths but sensor doesn't provide that info now*/
6891    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6892                      &gCamCapability[cameraId]->focal_length,
6893                      1);
6894
6895    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6896            gCamCapability[cameraId]->apertures,
6897            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6898
6899    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6900            gCamCapability[cameraId]->filter_densities,
6901            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6902
6903
6904    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6905            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6906            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6907
6908    int32_t lens_shading_map_size[] = {
6909            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6910            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6911    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6912                      lens_shading_map_size,
6913                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6914
6915    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6916            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6917
6918    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6919            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6920
6921    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6922            &gCamCapability[cameraId]->max_frame_duration, 1);
6923
6924    camera_metadata_rational baseGainFactor = {
6925            gCamCapability[cameraId]->base_gain_factor.numerator,
6926            gCamCapability[cameraId]->base_gain_factor.denominator};
6927    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6928                      &baseGainFactor, 1);
6929
6930    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6931                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6932
6933    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6934            gCamCapability[cameraId]->pixel_array_size.height};
6935    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6936                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6937
6938    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6939            gCamCapability[cameraId]->active_array_size.top,
6940            gCamCapability[cameraId]->active_array_size.width,
6941            gCamCapability[cameraId]->active_array_size.height};
6942    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6943            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6944
6945    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6946            &gCamCapability[cameraId]->white_level, 1);
6947
6948    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
6949    adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
6950            gCamCapability[cameraId]->color_arrangement);
6951    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6952            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
6953
6954    bool hasBlackRegions = false;
6955    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6956        LOGW("black_region_count: %d is bounded to %d",
6957            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6958        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6959    }
6960    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6961        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6962        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6963            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6964        }
6965        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6966                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6967        hasBlackRegions = true;
6968    }
6969
6970    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6971            &gCamCapability[cameraId]->flash_charge_duration, 1);
6972
6973    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6974            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6975
6976    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
6977    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6978            &timestampSource, 1);
6979
6980    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6981            &gCamCapability[cameraId]->histogram_size, 1);
6982
6983    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6984            &gCamCapability[cameraId]->max_histogram_count, 1);
6985
6986    int32_t sharpness_map_size[] = {
6987            gCamCapability[cameraId]->sharpness_map_size.width,
6988            gCamCapability[cameraId]->sharpness_map_size.height};
6989
6990    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6991            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6992
6993    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6994            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6995
6996    int32_t scalar_formats[] = {
6997            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6998            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6999            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7000            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7001            HAL_PIXEL_FORMAT_RAW10,
7002            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7003    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7004    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7005                      scalar_formats,
7006                      scalar_formats_count);
7007
7008    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7009    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7010    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7011            count, MAX_SIZES_CNT, available_processed_sizes);
7012    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7013            available_processed_sizes, count * 2);
7014
7015    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7016    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7017    makeTable(gCamCapability[cameraId]->raw_dim,
7018            count, MAX_SIZES_CNT, available_raw_sizes);
7019    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7020            available_raw_sizes, count * 2);
7021
7022    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7023    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7024    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7025            count, MAX_SIZES_CNT, available_fps_ranges);
7026    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7027            available_fps_ranges, count * 2);
7028
7029    camera_metadata_rational exposureCompensationStep = {
7030            gCamCapability[cameraId]->exp_compensation_step.numerator,
7031            gCamCapability[cameraId]->exp_compensation_step.denominator};
7032    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7033                      &exposureCompensationStep, 1);
7034
7035    Vector<uint8_t> availableVstabModes;
7036    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7037    char eis_prop[PROPERTY_VALUE_MAX];
7038    memset(eis_prop, 0, sizeof(eis_prop));
7039    property_get("persist.camera.eis.enable", eis_prop, "0");
7040    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7041    if (facingBack && eis_prop_set) {
7042        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7043    }
7044    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7045                      availableVstabModes.array(), availableVstabModes.size());
7046
7047    /*HAL 1 and HAL 3 common*/
7048    float maxZoom = 4;
7049    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7050            &maxZoom, 1);
7051
7052    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7053    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7054
7055    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7056    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7057        max3aRegions[2] = 0; /* AF not supported */
7058    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7059            max3aRegions, 3);
7060
7061    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7062    memset(prop, 0, sizeof(prop));
7063    property_get("persist.camera.facedetect", prop, "1");
7064    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7065    LOGD("Support face detection mode: %d",
7066             supportedFaceDetectMode);
7067
7068    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
7069    Vector<uint8_t> availableFaceDetectModes;
7070    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7071    if (supportedFaceDetectMode == 1) {
7072        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7073    } else if (supportedFaceDetectMode == 2) {
7074        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7075    } else if (supportedFaceDetectMode == 3) {
7076        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7077        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7078    } else {
7079        maxFaces = 0;
7080    }
7081    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7082            availableFaceDetectModes.array(),
7083            availableFaceDetectModes.size());
7084    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7085            (int32_t *)&maxFaces, 1);
7086
7087    int32_t exposureCompensationRange[] = {
7088            gCamCapability[cameraId]->exposure_compensation_min,
7089            gCamCapability[cameraId]->exposure_compensation_max};
7090    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7091            exposureCompensationRange,
7092            sizeof(exposureCompensationRange)/sizeof(int32_t));
7093
7094    uint8_t lensFacing = (facingBack) ?
7095            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7096    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7097
7098    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7099                      available_thumbnail_sizes,
7100                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7101
7102    /*all sizes will be clubbed into this tag*/
7103    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7104    /*android.scaler.availableStreamConfigurations*/
7105    Vector<int32_t> available_stream_configs;
7106    cam_dimension_t active_array_dim;
7107    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
7108    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Add input/output stream configurations for each scalar format */
7110    for (size_t j = 0; j < scalar_formats_count; j++) {
7111        switch (scalar_formats[j]) {
7112        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7113        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7114        case HAL_PIXEL_FORMAT_RAW10:
7115            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7116                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7117                addStreamConfig(available_stream_configs, scalar_formats[j],
7118                        gCamCapability[cameraId]->raw_dim[i],
7119                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7120            }
7121            break;
7122        case HAL_PIXEL_FORMAT_BLOB:
7123            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7124                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7125                addStreamConfig(available_stream_configs, scalar_formats[j],
7126                        gCamCapability[cameraId]->picture_sizes_tbl[i],
7127                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7128            }
7129            break;
7130        case HAL_PIXEL_FORMAT_YCbCr_420_888:
7131        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7132        default:
7133            cam_dimension_t largest_picture_size;
7134            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7135            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7136                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7137                addStreamConfig(available_stream_configs, scalar_formats[j],
7138                        gCamCapability[cameraId]->picture_sizes_tbl[i],
7139                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7140                /* Book keep largest */
7141                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7142                        >= largest_picture_size.width &&
7143                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
7144                        >= largest_picture_size.height)
7145                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7146            }
            /* For the 2 formats below we also support input streams for reprocessing; advertise those */
7148            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7149                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7150                 addStreamConfig(available_stream_configs, scalar_formats[j],
7151                         largest_picture_size,
7152                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7153            }
7154            break;
7155        }
7156    }
7157
7158    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7159                      available_stream_configs.array(), available_stream_configs.size());
7160    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7161    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7162
7163    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7164    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7165
7166    /* android.scaler.availableMinFrameDurations */
7167    Vector<int64_t> available_min_durations;
7168    for (size_t j = 0; j < scalar_formats_count; j++) {
7169        switch (scalar_formats[j]) {
7170        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7171        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7172        case HAL_PIXEL_FORMAT_RAW10:
7173            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7174                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7175                available_min_durations.add(scalar_formats[j]);
7176                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7177                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7178                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7179            }
7180            break;
7181        default:
7182            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7183                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7184                available_min_durations.add(scalar_formats[j]);
7185                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7186                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7187                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7188            }
7189            break;
7190        }
7191    }
7192    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7193                      available_min_durations.array(), available_min_durations.size());
7194
7195    Vector<int32_t> available_hfr_configs;
7196    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7197        int32_t fps = 0;
7198        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7199        case CAM_HFR_MODE_60FPS:
7200            fps = 60;
7201            break;
7202        case CAM_HFR_MODE_90FPS:
7203            fps = 90;
7204            break;
7205        case CAM_HFR_MODE_120FPS:
7206            fps = 120;
7207            break;
7208        case CAM_HFR_MODE_150FPS:
7209            fps = 150;
7210            break;
7211        case CAM_HFR_MODE_180FPS:
7212            fps = 180;
7213            break;
7214        case CAM_HFR_MODE_210FPS:
7215            fps = 210;
7216            break;
7217        case CAM_HFR_MODE_240FPS:
7218            fps = 240;
7219            break;
7220        case CAM_HFR_MODE_480FPS:
7221            fps = 480;
7222            break;
7223        case CAM_HFR_MODE_OFF:
7224        case CAM_HFR_MODE_MAX:
7225        default:
7226            break;
7227        }
7228
7229        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7230        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7231            /* For each HFR frame rate, need to advertise one variable fps range
7232             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7233             * and [120, 120]. While camcorder preview alone is running [30, 120] is
7234             * set by the app. When video recording is started, [120, 120] is
7235             * set. This way sensor configuration does not change when recording
7236             * is started */
7237
7238            /* (width, height, fps_min, fps_max, batch_size_max) */
7239            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7240                j < MAX_SIZES_CNT; j++) {
7241                available_hfr_configs.add(
7242                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7243                available_hfr_configs.add(
7244                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7245                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7246                available_hfr_configs.add(fps);
7247                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7248
7249                /* (width, height, fps_min, fps_max, batch_size_max) */
7250                available_hfr_configs.add(
7251                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7252                available_hfr_configs.add(
7253                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7254                available_hfr_configs.add(fps);
7255                available_hfr_configs.add(fps);
7256                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7257            }
7258       }
7259    }
7260    //Advertise HFR capability only if the property is set
7261    memset(prop, 0, sizeof(prop));
7262    property_get("persist.camera.hal3hfr.enable", prop, "1");
7263    uint8_t hfrEnable = (uint8_t)atoi(prop);
7264
7265    if(hfrEnable && available_hfr_configs.array()) {
7266        staticInfo.update(
7267                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
7268                available_hfr_configs.array(), available_hfr_configs.size());
7269    }
7270
7271    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
7272    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
7273                      &max_jpeg_size, 1);
7274
7275    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
7276    size_t size = 0;
7277    count = CAM_EFFECT_MODE_MAX;
7278    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
7279    for (size_t i = 0; i < count; i++) {
7280        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7281                gCamCapability[cameraId]->supported_effects[i]);
7282        if (NAME_NOT_FOUND != val) {
7283            avail_effects[size] = (uint8_t)val;
7284            size++;
7285        }
7286    }
7287    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
7288                      avail_effects,
7289                      size);
7290
7291    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
7292    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
7293    size_t supported_scene_modes_cnt = 0;
7294    count = CAM_SCENE_MODE_MAX;
7295    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
7296    for (size_t i = 0; i < count; i++) {
7297        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
7298                CAM_SCENE_MODE_OFF) {
7299            int val = lookupFwkName(SCENE_MODES_MAP,
7300                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
7301                    gCamCapability[cameraId]->supported_scene_modes[i]);
7302            if (NAME_NOT_FOUND != val) {
7303                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
7304                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
7305                supported_scene_modes_cnt++;
7306            }
7307        }
7308    }
7309    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7310                      avail_scene_modes,
7311                      supported_scene_modes_cnt);
7312
7313    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
7314    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
7315                      supported_scene_modes_cnt,
7316                      CAM_SCENE_MODE_MAX,
7317                      scene_mode_overrides,
7318                      supported_indexes,
7319                      cameraId);
7320
7321    if (supported_scene_modes_cnt == 0) {
7322        supported_scene_modes_cnt = 1;
7323        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
7324    }
7325
7326    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
7327            scene_mode_overrides, supported_scene_modes_cnt * 3);
7328
7329    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
7330                                         ANDROID_CONTROL_MODE_AUTO,
7331                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
7332    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
7333            available_control_modes,
7334            3);
7335
7336    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
7337    size = 0;
7338    count = CAM_ANTIBANDING_MODE_MAX;
7339    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
7340    for (size_t i = 0; i < count; i++) {
7341        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7342                gCamCapability[cameraId]->supported_antibandings[i]);
7343        if (NAME_NOT_FOUND != val) {
7344            avail_antibanding_modes[size] = (uint8_t)val;
7345            size++;
7346        }
7347
7348    }
7349    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7350                      avail_antibanding_modes,
7351                      size);
7352
7353    uint8_t avail_abberation_modes[] = {
7354            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
7355            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
7356            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
7357    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
7358    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
7359    if (0 == count) {
        //  If no aberration correction modes are available for a device, advertise only the OFF mode
7361        size = 1;
7362    } else {
        // If count is non-zero then at least one of the FAST or HIGH_QUALITY modes is supported.
        // So, advertise all 3 modes if at least one mode is supported, as per the
        // new M requirement
7366        size = 3;
7367    }
7368    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7369            avail_abberation_modes,
7370            size);
7371
7372    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
7373    size = 0;
7374    count = CAM_FOCUS_MODE_MAX;
7375    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
7376    for (size_t i = 0; i < count; i++) {
7377        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7378                gCamCapability[cameraId]->supported_focus_modes[i]);
7379        if (NAME_NOT_FOUND != val) {
7380            avail_af_modes[size] = (uint8_t)val;
7381            size++;
7382        }
7383    }
7384    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
7385                      avail_af_modes,
7386                      size);
7387
7388    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
7389    size = 0;
7390    count = CAM_WB_MODE_MAX;
7391    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
7392    for (size_t i = 0; i < count; i++) {
7393        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7394                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7395                gCamCapability[cameraId]->supported_white_balances[i]);
7396        if (NAME_NOT_FOUND != val) {
7397            avail_awb_modes[size] = (uint8_t)val;
7398            size++;
7399        }
7400    }
7401    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
7402                      avail_awb_modes,
7403                      size);
7404
7405    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7406    count = CAM_FLASH_FIRING_LEVEL_MAX;
7407    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7408            count);
7409    for (size_t i = 0; i < count; i++) {
7410        available_flash_levels[i] =
7411                gCamCapability[cameraId]->supported_firing_levels[i];
7412    }
7413    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7414            available_flash_levels, count);
7415
7416    uint8_t flashAvailable;
7417    if (gCamCapability[cameraId]->flash_available)
7418        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7419    else
7420        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7421    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7422            &flashAvailable, 1);
7423
7424    Vector<uint8_t> avail_ae_modes;
7425    count = CAM_AE_MODE_MAX;
7426    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
7427    for (size_t i = 0; i < count; i++) {
7428        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
7429    }
7430    if (flashAvailable) {
7431        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
7432        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
7433    }
7434    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
7435                      avail_ae_modes.array(),
7436                      avail_ae_modes.size());
7437
7438    int32_t sensitivity_range[2];
7439    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
7440    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
7441    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
7442                      sensitivity_range,
7443                      sizeof(sensitivity_range) / sizeof(int32_t));
7444
7445    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7446                      &gCamCapability[cameraId]->max_analog_sensitivity,
7447                      1);
7448
7449    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
7450    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
7451                      &sensor_orientation,
7452                      1);
7453
7454    int32_t max_output_streams[] = {
7455            MAX_STALLING_STREAMS,
7456            MAX_PROCESSED_STREAMS,
7457            MAX_RAW_STREAMS};
7458    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
7459            max_output_streams,
7460            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
7461
7462    uint8_t avail_leds = 0;
7463    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
7464                      &avail_leds, 0);
7465
7466    uint8_t focus_dist_calibrated;
7467    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
7468            gCamCapability[cameraId]->focus_dist_calibrated);
7469    if (NAME_NOT_FOUND != val) {
7470        focus_dist_calibrated = (uint8_t)val;
7471        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7472                     &focus_dist_calibrated, 1);
7473    }
7474
7475    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7476    size = 0;
7477    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7478            MAX_TEST_PATTERN_CNT);
7479    for (size_t i = 0; i < count; i++) {
7480        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7481                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7482        if (NAME_NOT_FOUND != testpatternMode) {
7483            avail_testpattern_modes[size] = testpatternMode;
7484            size++;
7485        }
7486    }
7487    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7488                      avail_testpattern_modes,
7489                      size);
7490
7491    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
7492    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
7493                      &max_pipeline_depth,
7494                      1);
7495
7496    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
7497    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7498                      &partial_result_count,
7499                       1);
7500
7501    int32_t max_stall_duration = MAX_REPROCESS_STALL;
7502    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7503
7504    Vector<uint8_t> available_capabilities;
7505    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
7506    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
7507    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
7508    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
7509    if (supportBurst) {
7510        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
7511    }
7512    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
7513    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
7514    if (hfrEnable && available_hfr_configs.array()) {
7515        available_capabilities.add(
7516                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
7517    }
7518
7519    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
7520        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
7521    }
7522    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7523            available_capabilities.array(),
7524            available_capabilities.size());
7525
7526    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
7527    //Assumption is that all bayer cameras support MANUAL_SENSOR.
7528    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7529            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7530
7531    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7532            &aeLockAvailable, 1);
7533
7534    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
7535    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7536    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7537            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7538
7539    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7540            &awbLockAvailable, 1);
7541
7542    int32_t max_input_streams = 1;
7543    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7544                      &max_input_streams,
7545                      1);
7546
7547    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
7548    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
7549            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
7550            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
7551            HAL_PIXEL_FORMAT_YCbCr_420_888};
7552    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7553                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
7554
7555    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
7556    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
7557                      &max_latency,
7558                      1);
7559
7560    int32_t isp_sensitivity_range[2];
7561    isp_sensitivity_range[0] =
7562        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
7563    isp_sensitivity_range[1] =
7564        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
7565    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7566                      isp_sensitivity_range,
7567                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7568
7569    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7570                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7571    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7572            available_hot_pixel_modes,
7573            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7574
7575    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7576                                         ANDROID_SHADING_MODE_FAST,
7577                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
7578    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7579                      available_shading_modes,
7580                      3);
7581
7582    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7583                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7584    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7585                      available_lens_shading_map_modes,
7586                      2);
7587
7588    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
7589                                      ANDROID_EDGE_MODE_FAST,
7590                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
7591                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
7592    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7593            available_edge_modes,
7594            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
7595
7596    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
7597                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
7598                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
7599                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
7600                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
7601    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7602            available_noise_red_modes,
7603            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
7604
7605    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
7606                                         ANDROID_TONEMAP_MODE_FAST,
7607                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7608    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7609            available_tonemap_modes,
7610            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7611
7612    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7613    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7614            available_hot_pixel_map_modes,
7615            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7616
7617    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7618            gCamCapability[cameraId]->reference_illuminant1);
7619    if (NAME_NOT_FOUND != val) {
7620        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7621        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7622    }
7623
7624    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7625            gCamCapability[cameraId]->reference_illuminant2);
7626    if (NAME_NOT_FOUND != val) {
7627        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7628        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7629    }
7630
7631    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7632            (void *)gCamCapability[cameraId]->forward_matrix1,
7633            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7634
7635    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7636            (void *)gCamCapability[cameraId]->forward_matrix2,
7637            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7638
7639    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7640            (void *)gCamCapability[cameraId]->color_transform1,
7641            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7642
7643    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7644            (void *)gCamCapability[cameraId]->color_transform2,
7645            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7646
7647    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7648            (void *)gCamCapability[cameraId]->calibration_transform1,
7649            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7650
7651    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7652            (void *)gCamCapability[cameraId]->calibration_transform2,
7653            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7654
7655    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7656       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7657       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7658       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7659       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7660       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7661       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7662       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7663       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7664       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7665       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7666       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7667       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7668       ANDROID_JPEG_GPS_COORDINATES,
7669       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7670       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7671       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7672       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7673       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7674       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7675       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7676       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7677       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7678       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7679       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7680       ANDROID_STATISTICS_FACE_DETECT_MODE,
7681       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7682       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7683       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7684       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7685       /* DevCamDebug metadata request_keys_basic */
7686       DEVCAMDEBUG_META_ENABLE,
7687       /* DevCamDebug metadata end */
7688       };
7689
7690    size_t request_keys_cnt =
7691            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7692    Vector<int32_t> available_request_keys;
7693    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7694    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7695        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7696    }
7697
7698    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7699            available_request_keys.array(), available_request_keys.size());
7700
7701    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7702       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7703       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7704       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7705       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7706       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7707       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7708       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7709       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7710       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7711       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7712       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7713       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7714       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7715       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7716       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7717       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7718       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7719       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7720       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7721       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7722       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7723       ANDROID_STATISTICS_FACE_SCORES,
7724       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7725       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
7726       // DevCamDebug metadata result_keys_basic
7727       DEVCAMDEBUG_META_ENABLE,
7728       // DevCamDebug metadata result_keys AF
7729       DEVCAMDEBUG_AF_LENS_POSITION,
7730       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
7731       DEVCAMDEBUG_AF_TOF_DISTANCE,
7732       DEVCAMDEBUG_AF_LUMA,
7733       DEVCAMDEBUG_AF_HAF_STATE,
7734       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7735       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7736       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7737       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7738       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7739       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7740       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7741       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7742       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7743       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7744       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7745       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7746       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7747       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7748       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7749       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7750       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7751       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7752       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7753       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7754       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7755       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7756       // DevCamDebug metadata result_keys AEC
7757       DEVCAMDEBUG_AEC_TARGET_LUMA,
7758       DEVCAMDEBUG_AEC_COMP_LUMA,
7759       DEVCAMDEBUG_AEC_AVG_LUMA,
7760       DEVCAMDEBUG_AEC_CUR_LUMA,
7761       DEVCAMDEBUG_AEC_LINECOUNT,
7762       DEVCAMDEBUG_AEC_REAL_GAIN,
7763       DEVCAMDEBUG_AEC_EXP_INDEX,
7764       DEVCAMDEBUG_AEC_LUX_IDX,
7765       // DevCamDebug metadata result_keys AWB
7766       DEVCAMDEBUG_AWB_R_GAIN,
7767       DEVCAMDEBUG_AWB_G_GAIN,
7768       DEVCAMDEBUG_AWB_B_GAIN,
7769       DEVCAMDEBUG_AWB_CCT,
7770       DEVCAMDEBUG_AWB_DECISION,
7771       /* DevCamDebug metadata end */
7772       };
7773    size_t result_keys_cnt =
7774            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7775
7776    Vector<int32_t> available_result_keys;
7777    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7778    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7779        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7780    }
7781    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7782        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7783        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7784    }
7785    if (supportedFaceDetectMode == 1) {
7786        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7787        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7788    } else if ((supportedFaceDetectMode == 2) ||
7789            (supportedFaceDetectMode == 3)) {
7790        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7791        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7792    }
7793    if (hasBlackRegions) {
7794        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
7795        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
7796    }
7797    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7798            available_result_keys.array(), available_result_keys.size());
7799
7800    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7801       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7802       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7803       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7804       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7805       ANDROID_SCALER_CROPPING_TYPE,
7806       ANDROID_SYNC_MAX_LATENCY,
7807       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7808       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7809       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7810       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7811       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7812       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7813       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7814       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7815       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7816       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7817       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7818       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7819       ANDROID_LENS_FACING,
7820       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7821       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7822       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7823       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7824       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7825       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7826       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7827       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7828       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7829       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7830       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7831       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7832       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7833       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7834       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7835       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7836       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7837       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7838       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7839       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7840       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7841       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7842       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7843       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7844       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7845       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7846       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7847       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7848       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7849       ANDROID_CONTROL_AVAILABLE_MODES,
7850       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7851       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7852       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7853       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7854       ANDROID_SHADING_AVAILABLE_MODES,
7855       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7856       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7857
7858    Vector<int32_t> available_characteristics_keys;
7859    available_characteristics_keys.appendArray(characteristics_keys_basic,
7860            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7861    if (hasBlackRegions) {
7862        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7863    }
7864    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7865                      available_characteristics_keys.array(),
7866                      available_characteristics_keys.size());
7867
7868    /*available stall durations depend on the hw + sw and will be different for different devices */
7869    /*have to add for raw after implementation*/
7870    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7871    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7872
7873    Vector<int64_t> available_stall_durations;
7874    for (uint32_t j = 0; j < stall_formats_count; j++) {
7875        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7876            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7877                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7878                available_stall_durations.add(stall_formats[j]);
7879                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7880                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7881                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7882          }
7883        } else {
7884            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7885                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7886                available_stall_durations.add(stall_formats[j]);
7887                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7888                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7889                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7890            }
7891        }
7892    }
7893    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7894                      available_stall_durations.array(),
7895                      available_stall_durations.size());
7896
7897    //QCAMERA3_OPAQUE_RAW
7898    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7899    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7900    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7901    case LEGACY_RAW:
7902        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7903            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7904        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7905            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7906        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7907            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7908        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7909        break;
7910    case MIPI_RAW:
7911        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7912            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7913        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7914            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7915        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7916            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7917        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7918        break;
7919    default:
7920        LOGE("unknown opaque_raw_format %d",
7921                gCamCapability[cameraId]->opaque_raw_fmt);
7922        break;
7923    }
7924    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7925
7926    Vector<int32_t> strides;
7927    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7928            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7929        cam_stream_buf_plane_info_t buf_planes;
7930        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7931        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7932        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7933            &gCamCapability[cameraId]->padding_info, &buf_planes);
7934        strides.add(buf_planes.plane_info.mp[0].stride);
7935    }
7936    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7937            strides.size());
7938
7939    Vector<int32_t> opaque_size;
7940    for (size_t j = 0; j < scalar_formats_count; j++) {
7941        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7942            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7943                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7944                cam_stream_buf_plane_info_t buf_planes;
7945
7946                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7947                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7948
7949                if (rc == 0) {
7950                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7951                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7952                    opaque_size.add(buf_planes.plane_info.frame_len);
7953                }else {
7954                    LOGE("raw frame calculation failed!");
7955                }
7956            }
7957        }
7958    }
7959
7960    if ((opaque_size.size() > 0) &&
7961            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7962        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7963    else
7964        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7965
7966    gStaticMetadata[cameraId] = staticInfo.release();
7967    return rc;
7968}
7969
/*===========================================================================
 * FUNCTION   : makeTable
 *
 * DESCRIPTION: make a table of sizes
 *
 * PARAMETERS :
 *   @dimTable  : input array of width/height dimension entries
 *   @size      : number of entries in dimTable
 *   @max_size  : maximum number of entries to copy
 *   @sizeTable : output array of interleaved width/height values
 *
 *==========================================================================*/
7979void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7980        size_t max_size, int32_t *sizeTable)
7981{
7982    size_t j = 0;
7983    if (size > max_size) {
7984       size = max_size;
7985    }
7986    for (size_t i = 0; i < size; i++) {
7987        sizeTable[j] = dimTable[i].width;
7988        sizeTable[j+1] = dimTable[i].height;
7989        j+=2;
7990    }
7991}
7992
/*===========================================================================
 * FUNCTION   : makeFPSTable
 *
 * DESCRIPTION: make a table of fps ranges
 *
 * PARAMETERS :
 *   @fpsTable       : input array of fps ranges
 *   @size           : number of entries in fpsTable
 *   @max_size       : maximum number of entries to copy
 *   @fpsRangesTable : output array of interleaved (min_fps, max_fps) values
 *==========================================================================*/
8001void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8002        size_t max_size, int32_t *fpsRangesTable)
8003{
8004    size_t j = 0;
8005    if (size > max_size) {
8006       size = max_size;
8007    }
8008    for (size_t i = 0; i < size; i++) {
8009        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8010        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8011        j+=2;
8012    }
8013}
8014
/*===========================================================================
 * FUNCTION   : makeOverridesList
 *
 * DESCRIPTION: make a list of scene mode overrides
 *
 * PARAMETERS :
 *   @overridesTable    : per-scene-mode override settings from the daemon
 *   @size              : number of scene modes to translate
 *   @max_size          : maximum number of scene modes to translate
 *   @overridesList     : output list of (ae, awb, af) override triplets
 *   @supported_indexes : index into overridesTable for each scene mode
 *   @camera_id         : camera whose capabilities are consulted
 *
 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    size_t j = 0;
    if (size > max_size) {
       size = max_size;
    }
    // Clamp focus mode count to the table size the capability struct can hold.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th framework scene mode to its slot in
        // the daemon-provided overridesTable.
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the module has a flash unit.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL awb mode to the framework enum.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // NOTE(review): if the awb lookup fails, overridesList[j+1] is left
        // unwritten — presumably the caller zero-initializes the buffer;
        // verify at the call site.
        uint8_t focus_override = overridesTable[index].af_mode;
        // Only advertise the af override if this sensor actually supports it.
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        // Each scene mode consumes 3 output bytes: (ae, awb, af).
        j+=3;
    }
}
8068
/*===========================================================================
 * FUNCTION   : filterJpegSizes
 *
 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
 *              could be downscaled to
 *
 * PARAMETERS :
 *   @jpegSizes         : output array of filtered width/height pairs
 *   @processedSizes    : input array of interleaved width/height values
 *   @processedSizesCnt : number of int32 entries in processedSizes
 *   @maxCount          : maximum number of entries to examine
 *   @active_array_size : sensor active array dimensions
 *   @downscale_factor  : maximum downscale factor (0 is treated as 1)
 *
 * RETURN     : length of jpegSizes array
 *==========================================================================*/
8079
8080size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8081        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8082        uint8_t downscale_factor)
8083{
8084    if (0 == downscale_factor) {
8085        downscale_factor = 1;
8086    }
8087
8088    int32_t min_width = active_array_size.width / downscale_factor;
8089    int32_t min_height = active_array_size.height / downscale_factor;
8090    size_t jpegSizesCnt = 0;
8091    if (processedSizesCnt > maxCount) {
8092        processedSizesCnt = maxCount;
8093    }
8094    for (size_t i = 0; i < processedSizesCnt; i+=2) {
8095        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8096            jpegSizes[jpegSizesCnt] = processedSizes[i];
8097            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8098            jpegSizesCnt += 2;
8099        }
8100    }
8101    return jpegSizesCnt;
8102}
8103
8104/*===========================================================================
8105 * FUNCTION   : computeNoiseModelEntryS
8106 *
8107 * DESCRIPTION: function to map a given sensitivity to the S noise
8108 *              model parameters in the DNG noise model.
8109 *
8110 * PARAMETERS : sens : the sensor sensitivity
8111 *
 * RETURN     : S (sensor amplification) noise
8113 *
8114 *==========================================================================*/
8115double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8116    double s = gCamCapability[mCameraId]->gradient_S * sens +
8117            gCamCapability[mCameraId]->offset_S;
8118    return ((s < 0.0) ? 0.0 : s);
8119}
8120
8121/*===========================================================================
8122 * FUNCTION   : computeNoiseModelEntryO
8123 *
8124 * DESCRIPTION: function to map a given sensitivity to the O noise
8125 *              model parameters in the DNG noise model.
8126 *
8127 * PARAMETERS : sens : the sensor sensitivity
8128 *
 * RETURN     : O (sensor readout) noise
8130 *
8131 *==========================================================================*/
8132double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8133    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8134    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8135            1.0 : (1.0 * sens / max_analog_sens);
8136    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8137            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8138    return ((o < 0.0) ? 0.0 : o);
8139}
8140
8141/*===========================================================================
8142 * FUNCTION   : getSensorSensitivity
8143 *
8144 * DESCRIPTION: convert iso_mode to an integer value
8145 *
8146 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8147 *
 * RETURN     : sensitivity supported by sensor
8149 *
8150 *==========================================================================*/
8151int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8152{
8153    int32_t sensitivity;
8154
8155    switch (iso_mode) {
8156    case CAM_ISO_MODE_100:
8157        sensitivity = 100;
8158        break;
8159    case CAM_ISO_MODE_200:
8160        sensitivity = 200;
8161        break;
8162    case CAM_ISO_MODE_400:
8163        sensitivity = 400;
8164        break;
8165    case CAM_ISO_MODE_800:
8166        sensitivity = 800;
8167        break;
8168    case CAM_ISO_MODE_1600:
8169        sensitivity = 1600;
8170        break;
8171    default:
8172        sensitivity = -1;
8173        break;
8174    }
8175    return sensitivity;
8176}
8177
8178/*===========================================================================
8179 * FUNCTION   : getCamInfo
8180 *
8181 * DESCRIPTION: query camera capabilities
8182 *
8183 * PARAMETERS :
8184 *   @cameraId  : camera Id
8185 *   @info      : camera info struct to be filled in with camera capabilities
8186 *
8187 * RETURN     : int type of status
8188 *              NO_ERROR  -- success
 *              non-zero failure code
8190 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamCapability/gStaticMetadata are lazily-initialized global caches;
    // gCamLock serializes their initialization. Every early return below
    // must release the lock first.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the HAL sensor position into the framework facing constant.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // NOTE(review): info->facing is left unset here while the rest of
        // the struct is still filled in; callers should check rc first.
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the fastest fps the sensor advertises across all fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Throughput estimate: all processed streams at full active-array
    // resolution running at max_fps, relative to max pixel bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
8258
8259/*===========================================================================
8260 * FUNCTION   : translateCapabilityToMetadata
8261 *
8262 * DESCRIPTION: translate the capability into camera_metadata_t
8263 *
8264 * PARAMETERS : type of the request
8265 *
8266 *
8267 * RETURN     : success: camera_metadata_t*
8268 *              failure: NULL
8269 *
8270 *==========================================================================*/
8271camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8272{
8273    if (mDefaultMetadata[type] != NULL) {
8274        return mDefaultMetadata[type];
8275    }
8276    //first time we are handling this request
8277    //fill up the metadata structure using the wrapper class
8278    CameraMetadata settings;
8279    //translate from cam_capability_t to camera_metadata_tag_t
8280    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8281    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8282    int32_t defaultRequestID = 0;
8283    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8284
8285    /* OIS disable */
8286    char ois_prop[PROPERTY_VALUE_MAX];
8287    memset(ois_prop, 0, sizeof(ois_prop));
8288    property_get("persist.camera.ois.disable", ois_prop, "0");
8289    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8290
8291    /* Force video to use OIS */
8292    char videoOisProp[PROPERTY_VALUE_MAX];
8293    memset(videoOisProp, 0, sizeof(videoOisProp));
8294    property_get("persist.camera.ois.video", videoOisProp, "1");
8295    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8296
8297    // EIS enable/disable
8298    char eis_prop[PROPERTY_VALUE_MAX];
8299    memset(eis_prop, 0, sizeof(eis_prop));
8300    property_get("persist.camera.eis.enable", eis_prop, "0");
8301    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8302
8303    // Hybrid AE enable/disable
8304    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8305    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8306    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8307    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8308
8309    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
8310    // This is a bit hacky. EIS is enabled only when the above setprop
8311    // is set to non-zero value and on back camera (for 2015 Nexus).
8312    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
8313    // configureStream is called before this function. In other words,
8314    // we cannot guarantee the app will call configureStream before
8315    // calling createDefaultRequest.
8316    const bool eisEnabled = facingBack && eis_prop_set;
8317
8318    uint8_t controlIntent = 0;
8319    uint8_t focusMode;
8320    uint8_t vsMode;
8321    uint8_t optStabMode;
8322    uint8_t cacMode;
8323    uint8_t edge_mode;
8324    uint8_t noise_red_mode;
8325    uint8_t tonemap_mode;
8326    bool highQualityModeEntryAvailable = FALSE;
8327    bool fastModeEntryAvailable = FALSE;
8328    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8329    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8330    switch (type) {
8331      case CAMERA3_TEMPLATE_PREVIEW:
8332        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8333        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8334        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8335        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8336        edge_mode = ANDROID_EDGE_MODE_FAST;
8337        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8338        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8339        break;
8340      case CAMERA3_TEMPLATE_STILL_CAPTURE:
8341        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8342        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8343        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8344        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8345        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8346        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8347        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8348        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8349        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8350            if (gCamCapability[mCameraId]->aberration_modes[i] ==
8351                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8352                highQualityModeEntryAvailable = TRUE;
8353            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8354                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8355                fastModeEntryAvailable = TRUE;
8356            }
8357        }
8358        if (highQualityModeEntryAvailable) {
8359            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8360        } else if (fastModeEntryAvailable) {
8361            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8362        }
8363        break;
8364      case CAMERA3_TEMPLATE_VIDEO_RECORD:
8365        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8366        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8367        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8368        if (eisEnabled) {
8369            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8370        }
8371        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8372        edge_mode = ANDROID_EDGE_MODE_FAST;
8373        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8374        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8375        if (forceVideoOis)
8376            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8377        break;
8378      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8379        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8380        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8381        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8382        if (eisEnabled) {
8383            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8384        }
8385        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8386        edge_mode = ANDROID_EDGE_MODE_FAST;
8387        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8388        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8389        if (forceVideoOis)
8390            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8391        break;
8392      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8393        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8394        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8395        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8396        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8397        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8398        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8399        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8400        break;
8401      case CAMERA3_TEMPLATE_MANUAL:
8402        edge_mode = ANDROID_EDGE_MODE_FAST;
8403        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8404        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8405        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8406        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8407        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8408        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8409        break;
8410      default:
8411        edge_mode = ANDROID_EDGE_MODE_FAST;
8412        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8413        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8414        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8415        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8416        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8417        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8418        break;
8419    }
8420    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8421    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8422    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8423    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8424        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8425    }
8426    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8427
8428    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8429            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8430        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8431    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8432            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8433            || ois_disable)
8434        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8435    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8436
8437    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8438            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8439
8440    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8441    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8442
8443    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8444    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8445
8446    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8447    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8448
8449    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8450    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8451
8452    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8453    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8454
8455    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8456    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8457
8458    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8459    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8460
8461    /*flash*/
8462    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8463    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8464
8465    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8466    settings.update(ANDROID_FLASH_FIRING_POWER,
8467            &flashFiringLevel, 1);
8468
8469    /* lens */
8470    float default_aperture = gCamCapability[mCameraId]->apertures[0];
8471    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8472
8473    if (gCamCapability[mCameraId]->filter_densities_count) {
8474        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8475        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8476                        gCamCapability[mCameraId]->filter_densities_count);
8477    }
8478
8479    float default_focal_length = gCamCapability[mCameraId]->focal_length;
8480    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8481
8482    if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8483        float default_focus_distance = 0;
8484        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8485    }
8486
8487    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8488    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8489
8490    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8491    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8492
8493    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8494    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8495
8496    /* face detection (default to OFF) */
8497    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8498    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8499
8500    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8501    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8502
8503    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8504    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8505
8506    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8507    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8508
8509    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8510    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8511
8512    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8513    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8514
8515    /* Exposure time(Update the Min Exposure Time)*/
8516    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8517    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8518
8519    /* frame duration */
8520    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8521    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8522
8523    /* sensitivity */
8524    static const int32_t default_sensitivity = 100;
8525    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8526    static const int32_t default_isp_sensitivity =
8527            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8528    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8529
8530    /*edge mode*/
8531    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8532
8533    /*noise reduction mode*/
8534    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8535
8536    /*color correction mode*/
8537    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8538    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8539
8540    /*transform matrix mode*/
8541    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8542
8543    int32_t scaler_crop_region[4];
8544    scaler_crop_region[0] = 0;
8545    scaler_crop_region[1] = 0;
8546    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8547    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8548    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8549
8550    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8551    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8552
8553    /*focus distance*/
8554    float focus_distance = 0.0;
8555    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8556
8557    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8558    /* Restrict default preview template to max 30 fps */
8559    float max_range = 0.0;
8560    float max_fixed_fps = 0.0;
8561    int32_t fps_range[2] = {0, 0};
8562    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8563            i++) {
8564        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8565                TEMPLATE_MAX_PREVIEW_FPS) {
8566            continue;
8567        }
8568        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8569            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8570        if (type == CAMERA3_TEMPLATE_PREVIEW ||
8571                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8572                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8573            if (range > max_range) {
8574                fps_range[0] =
8575                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8576                fps_range[1] =
8577                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8578                max_range = range;
8579            }
8580        } else {
8581            if (range < 0.01 && max_fixed_fps <
8582                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8583                fps_range[0] =
8584                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8585                fps_range[1] =
8586                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8587                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8588            }
8589        }
8590    }
8591    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8592
8593    /*precapture trigger*/
8594    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8595    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8596
8597    /*af trigger*/
8598    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8599    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8600
8601    /* ae & af regions */
8602    int32_t active_region[] = {
8603            gCamCapability[mCameraId]->active_array_size.left,
8604            gCamCapability[mCameraId]->active_array_size.top,
8605            gCamCapability[mCameraId]->active_array_size.left +
8606                    gCamCapability[mCameraId]->active_array_size.width,
8607            gCamCapability[mCameraId]->active_array_size.top +
8608                    gCamCapability[mCameraId]->active_array_size.height,
8609            0};
8610    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8611            sizeof(active_region) / sizeof(active_region[0]));
8612    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8613            sizeof(active_region) / sizeof(active_region[0]));
8614
8615    /* black level lock */
8616    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8617    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8618
8619    /* lens shading map mode */
8620    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8621    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8622        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8623    }
8624    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8625
8626    //special defaults for manual template
8627    if (type == CAMERA3_TEMPLATE_MANUAL) {
8628        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8629        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8630
8631        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8632        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8633
8634        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8635        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8636
8637        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8638        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8639
8640        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8641        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8642
8643        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8644        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8645    }
8646
8647
8648    /* TNR
8649     * We'll use this location to determine which modes TNR will be set.
8650     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8651     * This is not to be confused with linking on a per stream basis that decision
8652     * is still on per-session basis and will be handled as part of config stream
8653     */
8654    uint8_t tnr_enable = 0;
8655
8656    if (m_bTnrPreview || m_bTnrVideo) {
8657
8658        switch (type) {
8659            case CAMERA3_TEMPLATE_VIDEO_RECORD:
8660                    tnr_enable = 1;
8661                    break;
8662
8663            default:
8664                    tnr_enable = 0;
8665                    break;
8666        }
8667
8668        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8669        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8670        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8671
8672        LOGD("TNR:%d with process plate %d for template:%d",
8673                             tnr_enable, tnr_process_type, type);
8674    }
8675
8676    //Update Link tags to default
8677    int32_t sync_type = CAM_TYPE_STANDALONE;
8678    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8679
8680    int32_t is_main = 0; //this doesn't matter as app should overwrite
8681    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8682
8683    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8684
8685    /* CDS default */
8686    char prop[PROPERTY_VALUE_MAX];
8687    memset(prop, 0, sizeof(prop));
8688    property_get("persist.camera.CDS", prop, "Auto");
8689    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8690    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8691    if (CAM_CDS_MODE_MAX == cds_mode) {
8692        cds_mode = CAM_CDS_MODE_AUTO;
8693    }
8694
8695    /* Disabling CDS in templates which have TNR enabled*/
8696    if (tnr_enable)
8697        cds_mode = CAM_CDS_MODE_OFF;
8698
8699    int32_t mode = cds_mode;
8700    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8701
8702    /* hybrid ae */
8703    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8704
8705    mDefaultMetadata[type] = settings.release();
8706
8707    return mDefaultMetadata[type];
8708}
8709
8710/*===========================================================================
8711 * FUNCTION   : setFrameParameters
8712 *
8713 * DESCRIPTION: set parameters per frame as requested in the metadata from
8714 *              framework
8715 *
8716 * PARAMETERS :
8717 *   @request   : request that needs to be serviced
8718 *   @streamsArray : Stream ID of all the requested streams
8719 *   @blob_request: Whether this request is a blob request or not
8720 *
8721 * RETURN     : success: NO_ERROR
8722 *              failure:
8723 *==========================================================================*/
8724int QCamera3HardwareInterface::setFrameParameters(
8725                    camera3_capture_request_t *request,
8726                    cam_stream_ID_t streamsArray,
8727                    int blob_request,
8728                    uint32_t snapshotStreamId)
8729{
8730    /*translate from camera_metadata_t type to parm_type_t*/
8731    int rc = 0;
8732    int32_t hal_version = CAM_HAL_V3;
8733
8734    clear_metadata_buffer(mParameters);
8735    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8736        LOGE("Failed to set hal version in the parameters");
8737        return BAD_VALUE;
8738    }
8739
8740    /*we need to update the frame number in the parameters*/
8741    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8742            request->frame_number)) {
8743        LOGE("Failed to set the frame number in the parameters");
8744        return BAD_VALUE;
8745    }
8746
8747    /* Update stream id of all the requested buffers */
8748    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
8749        LOGE("Failed to set stream type mask in the parameters");
8750        return BAD_VALUE;
8751    }
8752
8753    if (mUpdateDebugLevel) {
8754        uint32_t dummyDebugLevel = 0;
8755        /* The value of dummyDebugLevel is irrelavent. On
8756         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8757        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8758                dummyDebugLevel)) {
8759            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8760            return BAD_VALUE;
8761        }
8762        mUpdateDebugLevel = false;
8763    }
8764
8765    if(request->settings != NULL){
8766        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8767        if (blob_request)
8768            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8769    }
8770
8771    return rc;
8772}
8773
8774/*===========================================================================
8775 * FUNCTION   : setReprocParameters
8776 *
8777 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8778 *              return it.
8779 *
8780 * PARAMETERS :
8781 *   @request   : request that needs to be serviced
8782 *
8783 * RETURN     : success: NO_ERROR
8784 *              failure:
8785 *==========================================================================*/
8786int32_t QCamera3HardwareInterface::setReprocParameters(
8787        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8788        uint32_t snapshotStreamId)
8789{
8790    /*translate from camera_metadata_t type to parm_type_t*/
8791    int rc = 0;
8792
8793    if (NULL == request->settings){
8794        LOGE("Reprocess settings cannot be NULL");
8795        return BAD_VALUE;
8796    }
8797
8798    if (NULL == reprocParam) {
8799        LOGE("Invalid reprocessing metadata buffer");
8800        return BAD_VALUE;
8801    }
8802    clear_metadata_buffer(reprocParam);
8803
8804    /*we need to update the frame number in the parameters*/
8805    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8806            request->frame_number)) {
8807        LOGE("Failed to set the frame number in the parameters");
8808        return BAD_VALUE;
8809    }
8810
8811    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8812    if (rc < 0) {
8813        LOGE("Failed to translate reproc request");
8814        return rc;
8815    }
8816
8817    CameraMetadata frame_settings;
8818    frame_settings = request->settings;
8819    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8820            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8821        int32_t *crop_count =
8822                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8823        int32_t *crop_data =
8824                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8825        int32_t *roi_map =
8826                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8827        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8828            cam_crop_data_t crop_meta;
8829            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8830            crop_meta.num_of_streams = 1;
8831            crop_meta.crop_info[0].crop.left   = crop_data[0];
8832            crop_meta.crop_info[0].crop.top    = crop_data[1];
8833            crop_meta.crop_info[0].crop.width  = crop_data[2];
8834            crop_meta.crop_info[0].crop.height = crop_data[3];
8835
8836            crop_meta.crop_info[0].roi_map.left =
8837                    roi_map[0];
8838            crop_meta.crop_info[0].roi_map.top =
8839                    roi_map[1];
8840            crop_meta.crop_info[0].roi_map.width =
8841                    roi_map[2];
8842            crop_meta.crop_info[0].roi_map.height =
8843                    roi_map[3];
8844
8845            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8846                rc = BAD_VALUE;
8847            }
8848            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8849                    request->input_buffer->stream,
8850                    crop_meta.crop_info[0].crop.left,
8851                    crop_meta.crop_info[0].crop.top,
8852                    crop_meta.crop_info[0].crop.width,
8853                    crop_meta.crop_info[0].crop.height);
8854            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8855                    request->input_buffer->stream,
8856                    crop_meta.crop_info[0].roi_map.left,
8857                    crop_meta.crop_info[0].roi_map.top,
8858                    crop_meta.crop_info[0].roi_map.width,
8859                    crop_meta.crop_info[0].roi_map.height);
8860            } else {
8861                LOGE("Invalid reprocess crop count %d!", *crop_count);
8862            }
8863    } else {
8864        LOGE("No crop data from matching output stream");
8865    }
8866
8867    /* These settings are not needed for regular requests so handle them specially for
8868       reprocess requests; information needed for EXIF tags */
8869    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8870        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8871                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8872        if (NAME_NOT_FOUND != val) {
8873            uint32_t flashMode = (uint32_t)val;
8874            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8875                rc = BAD_VALUE;
8876            }
8877        } else {
8878            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8879                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8880        }
8881    } else {
8882        LOGH("No flash mode in reprocess settings");
8883    }
8884
8885    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8886        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8887        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8888            rc = BAD_VALUE;
8889        }
8890    } else {
8891        LOGH("No flash state in reprocess settings");
8892    }
8893
8894    return rc;
8895}
8896
8897/*===========================================================================
8898 * FUNCTION   : saveRequestSettings
8899 *
8900 * DESCRIPTION: Add any settings that might have changed to the request settings
8901 *              and save the settings to be applied on the frame
8902 *
8903 * PARAMETERS :
8904 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8905 *   @request      : request with initial settings
8906 *
8907 * RETURN     :
8908 * camera_metadata_t* : pointer to the saved request settings
8909 *==========================================================================*/
8910camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8911        const CameraMetadata &jpegMetadata,
8912        camera3_capture_request_t *request)
8913{
8914    camera_metadata_t *resultMetadata;
8915    CameraMetadata camMetadata;
8916    camMetadata = request->settings;
8917
8918    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8919        int32_t thumbnail_size[2];
8920        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8921        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8922        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8923                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8924    }
8925
8926    resultMetadata = camMetadata.release();
8927    return resultMetadata;
8928}
8929
8930/*===========================================================================
8931 * FUNCTION   : setHalFpsRange
8932 *
8933 * DESCRIPTION: set FPS range parameter
8934 *
8935 *
8936 * PARAMETERS :
8937 *   @settings    : Metadata from framework
8938 *   @hal_metadata: Metadata buffer
8939 *
8940 *
8941 * RETURN     : success: NO_ERROR
8942 *              failure:
8943 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // settings; find() on a missing tag yields a NULL data pointer. Confirm
    // callers validate the tag before invoking this function.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the video fps range mirrors the requested AE range; it is
    // overridden below for constrained high speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;  // recomputed below; stays 0 unless HFR batch mode engages
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Run the sensor at a fixed rate: collapse both ranges onto max fps.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to the backend's HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // One preview frame is delivered per batch of video frames,
                // capped at MAX_HFR_BATCH_SIZE.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly adjusted) fps range into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9037
9038/*===========================================================================
9039 * FUNCTION   : translateToHalMetadata
9040 *
9041 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9042 *
9043 *
9044 * PARAMETERS :
9045 *   @request  : request sent from framework
9046 *
9047 *
9048 * RETURN     : success: NO_ERROR
9049 *              failure:
9050 *==========================================================================*/
9051int QCamera3HardwareInterface::translateToHalMetadata
9052                                  (const camera3_capture_request_t *request,
9053                                   metadata_buffer_t *hal_metadata,
9054                                   uint32_t snapshotStreamId)
9055{
9056    int rc = 0;
9057    CameraMetadata frame_settings;
9058    frame_settings = request->settings;
9059
    /* Do not change the order of the following list unless you know what you are
     * doing.
     * The order is laid out in such a way that parameters in the front of the table
     * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
9069    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9070        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9071        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9072            rc = BAD_VALUE;
9073        }
9074        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9075        if (rc != NO_ERROR) {
9076            LOGE("extractSceneMode failed");
9077        }
9078    }
9079
9080    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9081        uint8_t fwk_aeMode =
9082            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9083        uint8_t aeMode;
9084        int32_t redeye;
9085
9086        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9087            aeMode = CAM_AE_MODE_OFF;
9088        } else {
9089            aeMode = CAM_AE_MODE_ON;
9090        }
9091        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9092            redeye = 1;
9093        } else {
9094            redeye = 0;
9095        }
9096
9097        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9098                fwk_aeMode);
9099        if (NAME_NOT_FOUND != val) {
9100            int32_t flashMode = (int32_t)val;
9101            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9102        }
9103
9104        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9105        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9106            rc = BAD_VALUE;
9107        }
9108    }
9109
9110    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9111        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9112        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9113                fwk_whiteLevel);
9114        if (NAME_NOT_FOUND != val) {
9115            uint8_t whiteLevel = (uint8_t)val;
9116            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9117                rc = BAD_VALUE;
9118            }
9119        }
9120    }
9121
9122    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9123        uint8_t fwk_cacMode =
9124                frame_settings.find(
9125                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9126        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9127                fwk_cacMode);
9128        if (NAME_NOT_FOUND != val) {
9129            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9130            bool entryAvailable = FALSE;
9131            // Check whether Frameworks set CAC mode is supported in device or not
9132            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9133                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9134                    entryAvailable = TRUE;
9135                    break;
9136                }
9137            }
9138            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9139            // If entry not found then set the device supported mode instead of frameworks mode i.e,
9140            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9141            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9142            if (entryAvailable == FALSE) {
9143                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9144                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9145                } else {
9146                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9147                        // High is not supported and so set the FAST as spec say's underlying
9148                        // device implementation can be the same for both modes.
9149                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9150                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9151                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9152                        // in order to avoid the fps drop due to high quality
9153                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9154                    } else {
9155                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9156                    }
9157                }
9158            }
9159            LOGD("Final cacMode is %d", cacMode);
9160            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9161                rc = BAD_VALUE;
9162            }
9163        } else {
9164            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9165        }
9166    }
9167
9168    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9169        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9170        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9171                fwk_focusMode);
9172        if (NAME_NOT_FOUND != val) {
9173            uint8_t focusMode = (uint8_t)val;
9174            LOGD("set focus mode %d", focusMode);
9175            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9176                rc = BAD_VALUE;
9177            }
9178        }
9179    }
9180
9181    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9182        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9183        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9184                focalDistance)) {
9185            rc = BAD_VALUE;
9186        }
9187    }
9188
9189    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9190        uint8_t fwk_antibandingMode =
9191                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9192        int val = lookupHalName(ANTIBANDING_MODES_MAP,
9193                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9194        if (NAME_NOT_FOUND != val) {
9195            uint32_t hal_antibandingMode = (uint32_t)val;
9196            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9197                    hal_antibandingMode)) {
9198                rc = BAD_VALUE;
9199            }
9200        }
9201    }
9202
9203    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9204        int32_t expCompensation = frame_settings.find(
9205                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9206        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9207            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9208        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9209            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9210        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9211                expCompensation)) {
9212            rc = BAD_VALUE;
9213        }
9214    }
9215
9216    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9217        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9218        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9219            rc = BAD_VALUE;
9220        }
9221    }
9222    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9223        rc = setHalFpsRange(frame_settings, hal_metadata);
9224        if (rc != NO_ERROR) {
9225            LOGE("setHalFpsRange failed");
9226        }
9227    }
9228
9229    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9230        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9231        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9232            rc = BAD_VALUE;
9233        }
9234    }
9235
9236    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9237        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9238        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9239                fwk_effectMode);
9240        if (NAME_NOT_FOUND != val) {
9241            uint8_t effectMode = (uint8_t)val;
9242            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9243                rc = BAD_VALUE;
9244            }
9245        }
9246    }
9247
9248    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9249        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9250        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9251                colorCorrectMode)) {
9252            rc = BAD_VALUE;
9253        }
9254    }
9255
9256    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9257        cam_color_correct_gains_t colorCorrectGains;
9258        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9259            colorCorrectGains.gains[i] =
9260                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9261        }
9262        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9263                colorCorrectGains)) {
9264            rc = BAD_VALUE;
9265        }
9266    }
9267
9268    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9269        cam_color_correct_matrix_t colorCorrectTransform;
9270        cam_rational_type_t transform_elem;
9271        size_t num = 0;
9272        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9273           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9274              transform_elem.numerator =
9275                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9276              transform_elem.denominator =
9277                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9278              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9279              num++;
9280           }
9281        }
9282        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9283                colorCorrectTransform)) {
9284            rc = BAD_VALUE;
9285        }
9286    }
9287
9288    cam_trigger_t aecTrigger;
9289    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9290    aecTrigger.trigger_id = -1;
9291    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9292        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9293        aecTrigger.trigger =
9294            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9295        aecTrigger.trigger_id =
9296            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9297        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9298                aecTrigger)) {
9299            rc = BAD_VALUE;
9300        }
9301        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9302                aecTrigger.trigger, aecTrigger.trigger_id);
9303    }
9304
9305    /*af_trigger must come with a trigger id*/
9306    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9307        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9308        cam_trigger_t af_trigger;
9309        af_trigger.trigger =
9310            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9311        af_trigger.trigger_id =
9312            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9313        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9314            rc = BAD_VALUE;
9315        }
9316        LOGD("AfTrigger: %d AfTriggerID: %d",
9317                af_trigger.trigger, af_trigger.trigger_id);
9318    }
9319
9320    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9321        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9322        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9323            rc = BAD_VALUE;
9324        }
9325    }
9326    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9327        cam_edge_application_t edge_application;
9328        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9329        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9330            edge_application.sharpness = 0;
9331        } else {
9332            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9333        }
9334        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9335            rc = BAD_VALUE;
9336        }
9337    }
9338
9339    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9340        int32_t respectFlashMode = 1;
9341        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9342            uint8_t fwk_aeMode =
9343                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9344            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9345                respectFlashMode = 0;
9346                LOGH("AE Mode controls flash, ignore android.flash.mode");
9347            }
9348        }
9349        if (respectFlashMode) {
9350            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9351                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9352            LOGH("flash mode after mapping %d", val);
9353            // To check: CAM_INTF_META_FLASH_MODE usage
9354            if (NAME_NOT_FOUND != val) {
9355                uint8_t flashMode = (uint8_t)val;
9356                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9357                    rc = BAD_VALUE;
9358                }
9359            }
9360        }
9361    }
9362
9363    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9364        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9365        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9366            rc = BAD_VALUE;
9367        }
9368    }
9369
9370    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9371        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9372        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9373                flashFiringTime)) {
9374            rc = BAD_VALUE;
9375        }
9376    }
9377
9378    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9379        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9380        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9381                hotPixelMode)) {
9382            rc = BAD_VALUE;
9383        }
9384    }
9385
9386    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9387        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9388        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9389                lensAperture)) {
9390            rc = BAD_VALUE;
9391        }
9392    }
9393
9394    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9395        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9396        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9397                filterDensity)) {
9398            rc = BAD_VALUE;
9399        }
9400    }
9401
9402    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9403        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9404        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9405                focalLength)) {
9406            rc = BAD_VALUE;
9407        }
9408    }
9409
9410    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9411        uint8_t optStabMode =
9412                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9413        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9414                optStabMode)) {
9415            rc = BAD_VALUE;
9416        }
9417    }
9418
9419    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9420        uint8_t videoStabMode =
9421                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9422        LOGD("videoStabMode from APP = %d", videoStabMode);
9423        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9424                videoStabMode)) {
9425            rc = BAD_VALUE;
9426        }
9427    }
9428
9429
9430    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9431        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9432        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9433                noiseRedMode)) {
9434            rc = BAD_VALUE;
9435        }
9436    }
9437
9438    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9439        float reprocessEffectiveExposureFactor =
9440            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9441        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9442                reprocessEffectiveExposureFactor)) {
9443            rc = BAD_VALUE;
9444        }
9445    }
9446
9447    cam_crop_region_t scalerCropRegion;
9448    bool scalerCropSet = false;
9449    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9450        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9451        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9452        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9453        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9454
9455        // Map coordinate system from active array to sensor output.
9456        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9457                scalerCropRegion.width, scalerCropRegion.height);
9458
9459        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9460                scalerCropRegion)) {
9461            rc = BAD_VALUE;
9462        }
9463        scalerCropSet = true;
9464    }
9465
9466    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9467        int64_t sensorExpTime =
9468                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9469        LOGD("setting sensorExpTime %lld", sensorExpTime);
9470        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9471                sensorExpTime)) {
9472            rc = BAD_VALUE;
9473        }
9474    }
9475
9476    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9477        int64_t sensorFrameDuration =
9478                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9479        int64_t minFrameDuration = getMinFrameDuration(request);
9480        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9481        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9482            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9483        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9484        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9485                sensorFrameDuration)) {
9486            rc = BAD_VALUE;
9487        }
9488    }
9489
9490    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9491        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9492        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9493                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9494        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9495                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9496        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9497        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9498                sensorSensitivity)) {
9499            rc = BAD_VALUE;
9500        }
9501    }
9502
9503    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9504        int32_t ispSensitivity =
9505            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9506        if (ispSensitivity <
9507            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9508                ispSensitivity =
9509                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9510                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9511        }
9512        if (ispSensitivity >
9513            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9514                ispSensitivity =
9515                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9516                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9517        }
9518        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9519                ispSensitivity)) {
9520            rc = BAD_VALUE;
9521        }
9522    }
9523
9524    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9525        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9526        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9527            rc = BAD_VALUE;
9528        }
9529    }
9530
9531    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9532        uint8_t fwk_facedetectMode =
9533                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9534
9535        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9536                fwk_facedetectMode);
9537
9538        if (NAME_NOT_FOUND != val) {
9539            uint8_t facedetectMode = (uint8_t)val;
9540            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9541                    facedetectMode)) {
9542                rc = BAD_VALUE;
9543            }
9544        }
9545    }
9546
9547    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9548        uint8_t histogramMode =
9549                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9550        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9551                histogramMode)) {
9552            rc = BAD_VALUE;
9553        }
9554    }
9555
9556    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9557        uint8_t sharpnessMapMode =
9558                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9559        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9560                sharpnessMapMode)) {
9561            rc = BAD_VALUE;
9562        }
9563    }
9564
9565    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9566        uint8_t tonemapMode =
9567                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9568        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9569            rc = BAD_VALUE;
9570        }
9571    }
9572    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9573    /*All tonemap channels will have the same number of points*/
9574    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9575        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9576        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9577        cam_rgb_tonemap_curves tonemapCurves;
9578        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9579        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9580            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9581                     tonemapCurves.tonemap_points_cnt,
9582                    CAM_MAX_TONEMAP_CURVE_SIZE);
9583            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9584        }
9585
9586        /* ch0 = G*/
9587        size_t point = 0;
9588        cam_tonemap_curve_t tonemapCurveGreen;
9589        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9590            for (size_t j = 0; j < 2; j++) {
9591               tonemapCurveGreen.tonemap_points[i][j] =
9592                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9593               point++;
9594            }
9595        }
9596        tonemapCurves.curves[0] = tonemapCurveGreen;
9597
9598        /* ch 1 = B */
9599        point = 0;
9600        cam_tonemap_curve_t tonemapCurveBlue;
9601        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9602            for (size_t j = 0; j < 2; j++) {
9603               tonemapCurveBlue.tonemap_points[i][j] =
9604                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9605               point++;
9606            }
9607        }
9608        tonemapCurves.curves[1] = tonemapCurveBlue;
9609
9610        /* ch 2 = R */
9611        point = 0;
9612        cam_tonemap_curve_t tonemapCurveRed;
9613        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9614            for (size_t j = 0; j < 2; j++) {
9615               tonemapCurveRed.tonemap_points[i][j] =
9616                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9617               point++;
9618            }
9619        }
9620        tonemapCurves.curves[2] = tonemapCurveRed;
9621
9622        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9623                tonemapCurves)) {
9624            rc = BAD_VALUE;
9625        }
9626    }
9627
9628    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9629        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9630        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9631                captureIntent)) {
9632            rc = BAD_VALUE;
9633        }
9634    }
9635
9636    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9637        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9638        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9639                blackLevelLock)) {
9640            rc = BAD_VALUE;
9641        }
9642    }
9643
9644    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9645        uint8_t lensShadingMapMode =
9646                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9647        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9648                lensShadingMapMode)) {
9649            rc = BAD_VALUE;
9650        }
9651    }
9652
9653    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9654        cam_area_t roi;
9655        bool reset = true;
9656        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9657
9658        // Map coordinate system from active array to sensor output.
9659        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9660                roi.rect.height);
9661
9662        if (scalerCropSet) {
9663            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9664        }
9665        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9666            rc = BAD_VALUE;
9667        }
9668    }
9669
9670    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9671        cam_area_t roi;
9672        bool reset = true;
9673        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9674
9675        // Map coordinate system from active array to sensor output.
9676        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9677                roi.rect.height);
9678
9679        if (scalerCropSet) {
9680            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9681        }
9682        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9683            rc = BAD_VALUE;
9684        }
9685    }
9686
9687    // CDS for non-HFR non-video mode
9688    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9689            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9690        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9691        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9692            LOGE("Invalid CDS mode %d!", *fwk_cds);
9693        } else {
9694            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9695                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9696                rc = BAD_VALUE;
9697            }
9698        }
9699    }
9700
9701    // TNR
9702    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9703        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9704        uint8_t b_TnrRequested = 0;
9705        cam_denoise_param_t tnr;
9706        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9707        tnr.process_plates =
9708            (cam_denoise_process_type_t)frame_settings.find(
9709            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9710        b_TnrRequested = tnr.denoise_enable;
9711        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9712            rc = BAD_VALUE;
9713        }
9714    }
9715
9716    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9717        int32_t fwk_testPatternMode =
9718                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9719        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9720                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9721
9722        if (NAME_NOT_FOUND != testPatternMode) {
9723            cam_test_pattern_data_t testPatternData;
9724            memset(&testPatternData, 0, sizeof(testPatternData));
9725            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9726            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9727                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9728                int32_t *fwk_testPatternData =
9729                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9730                testPatternData.r = fwk_testPatternData[0];
9731                testPatternData.b = fwk_testPatternData[3];
9732                switch (gCamCapability[mCameraId]->color_arrangement) {
9733                    case CAM_FILTER_ARRANGEMENT_RGGB:
9734                    case CAM_FILTER_ARRANGEMENT_GRBG:
9735                        testPatternData.gr = fwk_testPatternData[1];
9736                        testPatternData.gb = fwk_testPatternData[2];
9737                        break;
9738                    case CAM_FILTER_ARRANGEMENT_GBRG:
9739                    case CAM_FILTER_ARRANGEMENT_BGGR:
9740                        testPatternData.gr = fwk_testPatternData[2];
9741                        testPatternData.gb = fwk_testPatternData[1];
9742                        break;
9743                    default:
9744                        LOGE("color arrangement %d is not supported",
9745                                gCamCapability[mCameraId]->color_arrangement);
9746                        break;
9747                }
9748            }
9749            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9750                    testPatternData)) {
9751                rc = BAD_VALUE;
9752            }
9753        } else {
9754            LOGE("Invalid framework sensor test pattern mode %d",
9755                    fwk_testPatternMode);
9756        }
9757    }
9758
9759    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9760        size_t count = 0;
9761        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9762        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9763                gps_coords.data.d, gps_coords.count, count);
9764        if (gps_coords.count != count) {
9765            rc = BAD_VALUE;
9766        }
9767    }
9768
9769    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9770        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9771        size_t count = 0;
9772        const char *gps_methods_src = (const char *)
9773                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9774        memset(gps_methods, '\0', sizeof(gps_methods));
9775        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9776        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9777                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9778        if (GPS_PROCESSING_METHOD_SIZE != count) {
9779            rc = BAD_VALUE;
9780        }
9781    }
9782
9783    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9784        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9785        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9786                gps_timestamp)) {
9787            rc = BAD_VALUE;
9788        }
9789    }
9790
9791    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9792        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9793        cam_rotation_info_t rotation_info;
9794        if (orientation == 0) {
9795           rotation_info.rotation = ROTATE_0;
9796        } else if (orientation == 90) {
9797           rotation_info.rotation = ROTATE_90;
9798        } else if (orientation == 180) {
9799           rotation_info.rotation = ROTATE_180;
9800        } else if (orientation == 270) {
9801           rotation_info.rotation = ROTATE_270;
9802        }
9803        rotation_info.streamId = snapshotStreamId;
9804        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9805        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9806            rc = BAD_VALUE;
9807        }
9808    }
9809
9810    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9811        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9812        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9813            rc = BAD_VALUE;
9814        }
9815    }
9816
9817    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9818        uint32_t thumb_quality = (uint32_t)
9819                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9820        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9821                thumb_quality)) {
9822            rc = BAD_VALUE;
9823        }
9824    }
9825
9826    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9827        cam_dimension_t dim;
9828        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9829        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9830        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9831            rc = BAD_VALUE;
9832        }
9833    }
9834
9835    // Internal metadata
9836    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9837        size_t count = 0;
9838        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9839        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9840                privatedata.data.i32, privatedata.count, count);
9841        if (privatedata.count != count) {
9842            rc = BAD_VALUE;
9843        }
9844    }
9845
9846    if (m_debug_avtimer || frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
9847        uint8_t* use_av_timer = NULL;
9848
9849        if (m_debug_avtimer){
9850            use_av_timer = &m_debug_avtimer;
9851        }
9852        else{
9853            use_av_timer =
9854                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
9855        }
9856
9857        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
9858            rc = BAD_VALUE;
9859        }
9860    }
9861
9862    // EV step
9863    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9864            gCamCapability[mCameraId]->exp_compensation_step)) {
9865        rc = BAD_VALUE;
9866    }
9867
9868    // CDS info
9869    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9870        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9871                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9872
9873        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9874                CAM_INTF_META_CDS_DATA, *cdsData)) {
9875            rc = BAD_VALUE;
9876        }
9877    }
9878
9879    // Hybrid AE
9880    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9881        uint8_t *hybrid_ae = (uint8_t *)
9882                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9883
9884        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9885                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9886            rc = BAD_VALUE;
9887        }
9888    }
9889
9890    return rc;
9891}
9892
9893/*===========================================================================
9894 * FUNCTION   : captureResultCb
9895 *
9896 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9897 *
9898 * PARAMETERS :
9899 *   @frame  : frame information from mm-camera-interface
9900 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9901 *   @userdata: userdata
9902 *
9903 * RETURN     : NONE
9904 *==========================================================================*/
9905void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9906                camera3_stream_buffer_t *buffer,
9907                uint32_t frame_number, bool isInputBuffer, void *userdata)
9908{
9909    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9910    if (hw == NULL) {
9911        LOGE("Invalid hw %p", hw);
9912        return;
9913    }
9914
9915    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9916    return;
9917}
9918
9919
9920/*===========================================================================
9921 * FUNCTION   : initialize
9922 *
9923 * DESCRIPTION: Pass framework callback pointers to HAL
9924 *
9925 * PARAMETERS :
9926 *
9927 *
9928 * RETURN     : Success : 0
9929 *              Failure: -ENODEV
9930 *==========================================================================*/
9931
9932int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9933                                  const camera3_callback_ops_t *callback_ops)
9934{
9935    LOGD("E");
9936    QCamera3HardwareInterface *hw =
9937        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9938    if (!hw) {
9939        LOGE("NULL camera device");
9940        return -ENODEV;
9941    }
9942
9943    int rc = hw->initialize(callback_ops);
9944    LOGD("X");
9945    return rc;
9946}
9947
9948/*===========================================================================
9949 * FUNCTION   : configure_streams
9950 *
9951 * DESCRIPTION:
9952 *
9953 * PARAMETERS :
9954 *
9955 *
9956 * RETURN     : Success: 0
9957 *              Failure: -EINVAL (if stream configuration is invalid)
9958 *                       -ENODEV (fatal error)
9959 *==========================================================================*/
9960
9961int QCamera3HardwareInterface::configure_streams(
9962        const struct camera3_device *device,
9963        camera3_stream_configuration_t *stream_list)
9964{
9965    LOGD("E");
9966    QCamera3HardwareInterface *hw =
9967        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9968    if (!hw) {
9969        LOGE("NULL camera device");
9970        return -ENODEV;
9971    }
9972    int rc = hw->configureStreams(stream_list);
9973    LOGD("X");
9974    return rc;
9975}
9976
9977/*===========================================================================
9978 * FUNCTION   : construct_default_request_settings
9979 *
9980 * DESCRIPTION: Configure a settings buffer to meet the required use case
9981 *
9982 * PARAMETERS :
9983 *
9984 *
9985 * RETURN     : Success: Return valid metadata
9986 *              Failure: Return NULL
9987 *==========================================================================*/
9988const camera_metadata_t* QCamera3HardwareInterface::
9989    construct_default_request_settings(const struct camera3_device *device,
9990                                        int type)
9991{
9992
9993    LOGD("E");
9994    camera_metadata_t* fwk_metadata = NULL;
9995    QCamera3HardwareInterface *hw =
9996        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9997    if (!hw) {
9998        LOGE("NULL camera device");
9999        return NULL;
10000    }
10001
10002    fwk_metadata = hw->translateCapabilityToMetadata(type);
10003
10004    LOGD("X");
10005    return fwk_metadata;
10006}
10007
10008/*===========================================================================
10009 * FUNCTION   : process_capture_request
10010 *
10011 * DESCRIPTION:
10012 *
10013 * PARAMETERS :
10014 *
10015 *
10016 * RETURN     :
10017 *==========================================================================*/
10018int QCamera3HardwareInterface::process_capture_request(
10019                    const struct camera3_device *device,
10020                    camera3_capture_request_t *request)
10021{
10022    LOGD("E");
10023    QCamera3HardwareInterface *hw =
10024        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10025    if (!hw) {
10026        LOGE("NULL camera device");
10027        return -EINVAL;
10028    }
10029
10030    int rc = hw->processCaptureRequest(request);
10031    LOGD("X");
10032    return rc;
10033}
10034
10035/*===========================================================================
10036 * FUNCTION   : dump
10037 *
10038 * DESCRIPTION:
10039 *
10040 * PARAMETERS :
10041 *
10042 *
10043 * RETURN     :
10044 *==========================================================================*/
10045
10046void QCamera3HardwareInterface::dump(
10047                const struct camera3_device *device, int fd)
10048{
10049    /* Log level property is read when "adb shell dumpsys media.camera" is
10050       called so that the log level can be controlled without restarting
10051       the media server */
10052    getLogLevel();
10053
10054    LOGD("E");
10055    QCamera3HardwareInterface *hw =
10056        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10057    if (!hw) {
10058        LOGE("NULL camera device");
10059        return;
10060    }
10061
10062    hw->dump(fd);
10063    LOGD("X");
10064    return;
10065}
10066
10067/*===========================================================================
10068 * FUNCTION   : flush
10069 *
10070 * DESCRIPTION:
10071 *
10072 * PARAMETERS :
10073 *
10074 *
10075 * RETURN     :
10076 *==========================================================================*/
10077
10078int QCamera3HardwareInterface::flush(
10079                const struct camera3_device *device)
10080{
10081    int rc;
10082    LOGD("E");
10083    QCamera3HardwareInterface *hw =
10084        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10085    if (!hw) {
10086        LOGE("NULL camera device");
10087        return -EINVAL;
10088    }
10089
10090    pthread_mutex_lock(&hw->mMutex);
10091    // Validate current state
10092    switch (hw->mState) {
10093        case STARTED:
10094            /* valid state */
10095            break;
10096
10097        case ERROR:
10098            pthread_mutex_unlock(&hw->mMutex);
10099            hw->handleCameraDeviceError();
10100            return -ENODEV;
10101
10102        default:
10103            LOGI("Flush returned during state %d", hw->mState);
10104            pthread_mutex_unlock(&hw->mMutex);
10105            return 0;
10106    }
10107    pthread_mutex_unlock(&hw->mMutex);
10108
10109    rc = hw->flush(true /* restart channels */ );
10110    LOGD("X");
10111    return rc;
10112}
10113
10114/*===========================================================================
10115 * FUNCTION   : close_camera_device
10116 *
10117 * DESCRIPTION:
10118 *
10119 * PARAMETERS :
10120 *
10121 *
10122 * RETURN     :
10123 *==========================================================================*/
10124int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10125{
10126    int ret = NO_ERROR;
10127    QCamera3HardwareInterface *hw =
10128        reinterpret_cast<QCamera3HardwareInterface *>(
10129            reinterpret_cast<camera3_device_t *>(device)->priv);
10130    if (!hw) {
10131        LOGE("NULL camera device");
10132        return BAD_VALUE;
10133    }
10134
10135    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10136    delete hw;
10137    LOGI("[KPI Perf]: X");
10138    return ret;
10139}
10140
10141/*===========================================================================
10142 * FUNCTION   : getWaveletDenoiseProcessPlate
10143 *
10144 * DESCRIPTION: query wavelet denoise process plate
10145 *
10146 * PARAMETERS : None
10147 *
10148 * RETURN     : WNR prcocess plate value
10149 *==========================================================================*/
10150cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10151{
10152    char prop[PROPERTY_VALUE_MAX];
10153    memset(prop, 0, sizeof(prop));
10154    property_get("persist.denoise.process.plates", prop, "0");
10155    int processPlate = atoi(prop);
10156    switch(processPlate) {
10157    case 0:
10158        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10159    case 1:
10160        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10161    case 2:
10162        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10163    case 3:
10164        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10165    default:
10166        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10167    }
10168}
10169
10170
10171/*===========================================================================
10172 * FUNCTION   : getTemporalDenoiseProcessPlate
10173 *
10174 * DESCRIPTION: query temporal denoise process plate
10175 *
10176 * PARAMETERS : None
10177 *
10178 * RETURN     : TNR prcocess plate value
10179 *==========================================================================*/
10180cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10181{
10182    char prop[PROPERTY_VALUE_MAX];
10183    memset(prop, 0, sizeof(prop));
10184    property_get("persist.tnr.process.plates", prop, "0");
10185    int processPlate = atoi(prop);
10186    switch(processPlate) {
10187    case 0:
10188        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10189    case 1:
10190        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10191    case 2:
10192        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10193    case 3:
10194        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10195    default:
10196        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10197    }
10198}
10199
10200
10201/*===========================================================================
10202 * FUNCTION   : extractSceneMode
10203 *
10204 * DESCRIPTION: Extract scene mode from frameworks set metadata
10205 *
10206 * PARAMETERS :
10207 *      @frame_settings: CameraMetadata reference
10208 *      @metaMode: ANDROID_CONTORL_MODE
10209 *      @hal_metadata: hal metadata structure
10210 *
10211 * RETURN     : None
10212 *==========================================================================*/
10213int32_t QCamera3HardwareInterface::extractSceneMode(
10214        const CameraMetadata &frame_settings, uint8_t metaMode,
10215        metadata_buffer_t *hal_metadata)
10216{
10217    int32_t rc = NO_ERROR;
10218
10219    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10220        camera_metadata_ro_entry entry =
10221                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10222        if (0 == entry.count)
10223            return rc;
10224
10225        uint8_t fwk_sceneMode = entry.data.u8[0];
10226
10227        int val = lookupHalName(SCENE_MODES_MAP,
10228                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10229                fwk_sceneMode);
10230        if (NAME_NOT_FOUND != val) {
10231            uint8_t sceneMode = (uint8_t)val;
10232            LOGD("sceneMode: %d", sceneMode);
10233            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10234                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10235                rc = BAD_VALUE;
10236            }
10237        }
10238    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10239            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10240        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10241        LOGD("sceneMode: %d", sceneMode);
10242        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10243                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10244            rc = BAD_VALUE;
10245        }
10246    }
10247    return rc;
10248}
10249
10250/*===========================================================================
10251 * FUNCTION   : needRotationReprocess
10252 *
10253 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10254 *
10255 * PARAMETERS : none
10256 *
10257 * RETURN     : true: needed
10258 *              false: no need
10259 *==========================================================================*/
10260bool QCamera3HardwareInterface::needRotationReprocess()
10261{
10262    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10263        // current rotation is not zero, and pp has the capability to process rotation
10264        LOGH("need do reprocess for rotation");
10265        return true;
10266    }
10267
10268    return false;
10269}
10270
10271/*===========================================================================
10272 * FUNCTION   : needReprocess
10273 *
10274 * DESCRIPTION: if reprocess in needed
10275 *
10276 * PARAMETERS : none
10277 *
10278 * RETURN     : true: needed
10279 *              false: no need
10280 *==========================================================================*/
10281bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10282{
10283    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10284        // TODO: add for ZSL HDR later
10285        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10286        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10287            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10288            return true;
10289        } else {
10290            LOGH("already post processed frame");
10291            return false;
10292        }
10293    }
10294    return needRotationReprocess();
10295}
10296
10297/*===========================================================================
10298 * FUNCTION   : needJpegExifRotation
10299 *
10300 * DESCRIPTION: if rotation from jpeg is needed
10301 *
10302 * PARAMETERS : none
10303 *
10304 * RETURN     : true: needed
10305 *              false: no need
10306 *==========================================================================*/
10307bool QCamera3HardwareInterface::needJpegExifRotation()
10308{
10309   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10310    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10311       LOGD("Need use Jpeg EXIF Rotation");
10312       return true;
10313    }
10314    return false;
10315}
10316
10317/*===========================================================================
10318 * FUNCTION   : addOfflineReprocChannel
10319 *
10320 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10321 *              coming from input channel
10322 *
10323 * PARAMETERS :
10324 *   @config  : reprocess configuration
10325 *   @inputChHandle : pointer to the input (source) channel
10326 *
10327 *
10328 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10329 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel bound to the same camera/channel handles
    // as the rest of the session; results come back through captureResultCb.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    // Initialize before configuring streams; on failure the channel is
    // owned by us and must be deleted here.
    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from the HAL3 superset and then trim/extend
    // based on what the hardware actually supports.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the pp block cannot rotate
    // (rotation is then handled via JPEG EXIF instead).
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Derive the reprocess streams from the source channel's configuration.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
10377
10378/*===========================================================================
10379 * FUNCTION   : getMobicatMask
10380 *
10381 * DESCRIPTION: returns mobicat mask
10382 *
10383 * PARAMETERS : none
10384 *
10385 * RETURN     : mobicat mask
10386 *
10387 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Cached value set by setMobicat() from persist.camera.mobicat.
    return m_MobicatMask;
}
10392
10393/*===========================================================================
10394 * FUNCTION   : setMobicat
10395 *
10396 * DESCRIPTION: set Mobicat on/off.
10397 *
10398 * PARAMETERS :
10399 *   @params  : none
10400 *
10401 * RETURN     : int32_t type of status
10402 *              NO_ERROR  -- success
10403 *              none-zero failure code
10404 *==========================================================================*/
10405int32_t QCamera3HardwareInterface::setMobicat()
10406{
10407    char value [PROPERTY_VALUE_MAX];
10408    property_get("persist.camera.mobicat", value, "0");
10409    int32_t ret = NO_ERROR;
10410    uint8_t enableMobi = (uint8_t)atoi(value);
10411
10412    if (enableMobi) {
10413        tune_cmd_t tune_cmd;
10414        tune_cmd.type = SET_RELOAD_CHROMATIX;
10415        tune_cmd.module = MODULE_ALL;
10416        tune_cmd.value = TRUE;
10417        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10418                CAM_INTF_PARM_SET_VFE_COMMAND,
10419                tune_cmd);
10420
10421        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10422                CAM_INTF_PARM_SET_PP_COMMAND,
10423                tune_cmd);
10424    }
10425    m_MobicatMask = enableMobi;
10426
10427    return ret;
10428}
10429
10430/*===========================================================================
10431* FUNCTION   : getLogLevel
10432*
10433* DESCRIPTION: Reads the log level property into a variable
10434*
10435* PARAMETERS :
10436*   None
10437*
10438* RETURN     :
10439*   None
10440*==========================================================================*/
10441void QCamera3HardwareInterface::getLogLevel()
10442{
10443    char prop[PROPERTY_VALUE_MAX];
10444    uint32_t globalLogLevel = 0;
10445
10446    property_get("persist.camera.hal.debug", prop, "0");
10447    int val = atoi(prop);
10448    if (0 <= val) {
10449        gCamHal3LogLevel = (uint32_t)val;
10450    }
10451
10452    property_get("persist.camera.kpi.debug", prop, "1");
10453    gKpiDebugLevel = atoi(prop);
10454
10455    property_get("persist.camera.global.debug", prop, "0");
10456    val = atoi(prop);
10457    if (0 <= val) {
10458        globalLogLevel = (uint32_t)val;
10459    }
10460
10461    /* Highest log level among hal.logs and global.logs is selected */
10462    if (gCamHal3LogLevel < globalLogLevel)
10463        gCamHal3LogLevel = globalLogLevel;
10464
10465    return;
10466}
10467
10468/*===========================================================================
10469 * FUNCTION   : validateStreamRotations
10470 *
10471 * DESCRIPTION: Check if the rotations requested are supported
10472 *
10473 * PARAMETERS :
10474 *   @stream_list : streams to be configured
10475 *
10476 * RETURN     : NO_ERROR on success
10477 *              -EINVAL on failure
10478 *
10479 *==========================================================================*/
10480int QCamera3HardwareInterface::validateStreamRotations(
10481        camera3_stream_configuration_t *streamList)
10482{
10483    int rc = NO_ERROR;
10484
10485    /*
10486    * Loop through all streams requested in configuration
10487    * Check if unsupported rotations have been requested on any of them
10488    */
10489    for (size_t j = 0; j < streamList->num_streams; j++){
10490        camera3_stream_t *newStream = streamList->streams[j];
10491
10492        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10493        bool isImplDef = (newStream->format ==
10494                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10495        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10496                isImplDef);
10497
10498        if (isRotated && (!isImplDef || isZsl)) {
10499            LOGE("Error: Unsupported rotation of %d requested for stream"
10500                    "type:%d and stream format:%d",
10501                    newStream->rotation, newStream->stream_type,
10502                    newStream->format);
10503            rc = -EINVAL;
10504            break;
10505        }
10506    }
10507
10508    return rc;
10509}
10510
10511/*===========================================================================
10512* FUNCTION   : getFlashInfo
10513*
10514* DESCRIPTION: Retrieve information about whether the device has a flash.
10515*
10516* PARAMETERS :
10517*   @cameraId  : Camera id to query
10518*   @hasFlash  : Boolean indicating whether there is a flash device
10519*                associated with given camera
10520*   @flashNode : If a flash device exists, this will be its device node.
10521*
10522* RETURN     :
10523*   None
10524*==========================================================================*/
10525void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10526        bool& hasFlash,
10527        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10528{
10529    cam_capability_t* camCapability = gCamCapability[cameraId];
10530    if (NULL == camCapability) {
10531        hasFlash = false;
10532        flashNode[0] = '\0';
10533    } else {
10534        hasFlash = camCapability->flash_available;
10535        strlcpy(flashNode,
10536                (char*)camCapability->flash_dev_name,
10537                QCAMERA_MAX_FILEPATH_LENGTH);
10538    }
10539}
10540
10541/*===========================================================================
10542* FUNCTION   : getEepromVersionInfo
10543*
10544* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10545*
10546* PARAMETERS : None
10547*
10548* RETURN     : string describing EEPROM version
10549*              "\0" if no such info available
10550*==========================================================================*/
10551const char *QCamera3HardwareInterface::getEepromVersionInfo()
10552{
10553    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
10554}
10555
10556/*===========================================================================
10557* FUNCTION   : getLdafCalib
10558*
10559* DESCRIPTION: Retrieve Laser AF calibration data
10560*
10561* PARAMETERS : None
10562*
10563* RETURN     : Two uint32_t describing laser AF calibration data
10564*              NULL if none is available.
10565*==========================================================================*/
10566const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10567{
10568    if (mLdafCalibExist) {
10569        return &mLdafCalib[0];
10570    } else {
10571        return NULL;
10572    }
10573}
10574
10575/*===========================================================================
10576 * FUNCTION   : dynamicUpdateMetaStreamInfo
10577 *
10578 * DESCRIPTION: This function:
10579 *             (1) stops all the channels
10580 *             (2) returns error on pending requests and buffers
10581 *             (3) sends metastream_info in setparams
10582 *             (4) starts all channels
10583 *             This is useful when sensor has to be restarted to apply any
10584 *             settings such as frame rate from a different sensor mode
10585 *
10586 * PARAMETERS : None
10587 *
10588 * RETURN     : NO_ERROR on success
10589 *              Error codes on failure
10590 *
10591 *==========================================================================*/
10592int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
10593{
10594    ATRACE_CALL();
10595    int rc = NO_ERROR;
10596
10597    LOGD("E");
10598
10599    rc = stopAllChannels();
10600    if (rc < 0) {
10601        LOGE("stopAllChannels failed");
10602        return rc;
10603    }
10604
10605    rc = notifyErrorForPendingRequests();
10606    if (rc < 0) {
10607        LOGE("notifyErrorForPendingRequests failed");
10608        return rc;
10609    }
10610
10611    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
10612        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
10613                "Format:%d",
10614                mStreamConfigInfo.type[i],
10615                mStreamConfigInfo.stream_sizes[i].width,
10616                mStreamConfigInfo.stream_sizes[i].height,
10617                mStreamConfigInfo.postprocess_mask[i],
10618                mStreamConfigInfo.format[i]);
10619    }
10620
10621    /* Send meta stream info once again so that ISP can start */
10622    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10623            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
10624    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
10625            mParameters);
10626    if (rc < 0) {
10627        LOGE("set Metastreaminfo failed. Sensor mode does not change");
10628    }
10629
10630    rc = startAllChannels();
10631    if (rc < 0) {
10632        LOGE("startAllChannels failed");
10633        return rc;
10634    }
10635
10636    LOGD("X");
10637    return rc;
10638}
10639
10640/*===========================================================================
10641 * FUNCTION   : stopAllChannels
10642 *
10643 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10644 *
10645 * PARAMETERS : None
10646 *
10647 * RETURN     : NO_ERROR on success
10648 *              Error codes on failure
10649 *
10650 *==========================================================================*/
10651int32_t QCamera3HardwareInterface::stopAllChannels()
10652{
10653    int32_t rc = NO_ERROR;
10654
10655    LOGD("Stopping all channels");
10656    // Stop the Streams/Channels
10657    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10658        it != mStreamInfo.end(); it++) {
10659        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10660        if (channel) {
10661            channel->stop();
10662        }
10663        (*it)->status = INVALID;
10664    }
10665
10666    if (mSupportChannel) {
10667        mSupportChannel->stop();
10668    }
10669    if (mAnalysisChannel) {
10670        mAnalysisChannel->stop();
10671    }
10672    if (mRawDumpChannel) {
10673        mRawDumpChannel->stop();
10674    }
10675    if (mMetadataChannel) {
10676        /* If content of mStreamInfo is not 0, there is metadata stream */
10677        mMetadataChannel->stop();
10678    }
10679
10680    LOGD("All channels stopped");
10681    return rc;
10682}
10683
10684/*===========================================================================
10685 * FUNCTION   : startAllChannels
10686 *
10687 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10688 *
10689 * PARAMETERS : None
10690 *
10691 * RETURN     : NO_ERROR on success
10692 *              Error codes on failure
10693 *
10694 *==========================================================================*/
10695int32_t QCamera3HardwareInterface::startAllChannels()
10696{
10697    int32_t rc = NO_ERROR;
10698
10699    LOGD("Start all channels ");
10700    // Start the Streams/Channels
10701    if (mMetadataChannel) {
10702        /* If content of mStreamInfo is not 0, there is metadata stream */
10703        rc = mMetadataChannel->start();
10704        if (rc < 0) {
10705            LOGE("META channel start failed");
10706            return rc;
10707        }
10708    }
10709    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10710        it != mStreamInfo.end(); it++) {
10711        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10712        if (channel) {
10713            rc = channel->start();
10714            if (rc < 0) {
10715                LOGE("channel start failed");
10716                return rc;
10717            }
10718        }
10719    }
10720    if (mAnalysisChannel) {
10721        mAnalysisChannel->start();
10722    }
10723    if (mSupportChannel) {
10724        rc = mSupportChannel->start();
10725        if (rc < 0) {
10726            LOGE("Support channel start failed");
10727            return rc;
10728        }
10729    }
10730    if (mRawDumpChannel) {
10731        rc = mRawDumpChannel->start();
10732        if (rc < 0) {
10733            LOGE("RAW dump channel start failed");
10734            return rc;
10735        }
10736    }
10737
10738    LOGD("All channels started");
10739    return rc;
10740}
10741
10742/*===========================================================================
10743 * FUNCTION   : notifyErrorForPendingRequests
10744 *
10745 * DESCRIPTION: This function sends error for all the pending requests/buffers
10746 *
10747 * PARAMETERS : None
10748 *
10749 * RETURN     : Error codes
10750 *              NO_ERROR on success
10751 *
10752 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Determine the oldest frame number that still has a pending request.
    // Buffers belonging to frames OLDER than this already had their
    // metadata delivered, so they only need ERROR_BUFFER notifications;
    // frames at/after it need a full ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    // Walk every tracked request's buffer list; each iteration erases the
    // processed entry, so the loop advances via the erase() return value.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            // NOTE(review): plain operator new[] throws on failure rather
            // than returning NULL, so this check appears to be dead code —
            // confirm whether the build uses -fno-exceptions before relying
            // on it.
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            // Build one capture result carrying ALL of this request's
            // buffers, each flagged with BUFFER_STATUS_ERROR.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // Per-buffer ERROR_BUFFER notify precedes the aggregated
                // process_capture_result call below.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                // Fences of -1 mean "no fence"; the buffer is returned
                // unfilled with error status.
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            // NOTE(review): i is only dereferenced below (i->input_buffer);
            // this branch is reached when frame_number >= frameNum, which
            // presumably implies mPendingRequestsList is non-empty — confirm
            // i can never be end() here.
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            // NOTE(review): dead code under throwing operator new[] — see
            // the matching check in the branch above.
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            // ERROR_REQUEST result: no metadata, the request's input buffer
            // (if any) and all output buffers returned with error status.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            // The matching pending request entry is consumed along with
            // its buffers.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive: the loops above should already have emptied these
    // trackers; clear them unconditionally so no stale state survives.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
10891
10892bool QCamera3HardwareInterface::isOnEncoder(
10893        const cam_dimension_t max_viewfinder_size,
10894        uint32_t width, uint32_t height)
10895{
10896    return (width > (uint32_t)max_viewfinder_size.width ||
10897            height > (uint32_t)max_viewfinder_size.height);
10898}
10899
10900/*===========================================================================
10901 * FUNCTION   : setBundleInfo
10902 *
10903 * DESCRIPTION: Set bundle info for all streams that are bundle.
10904 *
10905 * PARAMETERS : None
10906 *
10907 * RETURN     : NO_ERROR on success
10908 *              Error codes on failure
10909 *==========================================================================*/
10910int32_t QCamera3HardwareInterface::setBundleInfo()
10911{
10912    int32_t rc = NO_ERROR;
10913
10914    if (mChannelHandle) {
10915        cam_bundle_config_t bundleInfo;
10916        memset(&bundleInfo, 0, sizeof(bundleInfo));
10917        rc = mCameraHandle->ops->get_bundle_info(
10918                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10919        if (rc != NO_ERROR) {
10920            LOGE("get_bundle_info failed");
10921            return rc;
10922        }
10923        if (mAnalysisChannel) {
10924            mAnalysisChannel->setBundleInfo(bundleInfo);
10925        }
10926        if (mSupportChannel) {
10927            mSupportChannel->setBundleInfo(bundleInfo);
10928        }
10929        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10930                it != mStreamInfo.end(); it++) {
10931            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10932            channel->setBundleInfo(bundleInfo);
10933        }
10934        if (mRawDumpChannel) {
10935            mRawDumpChannel->setBundleInfo(bundleInfo);
10936        }
10937    }
10938
10939    return rc;
10940}
10941
10942/*===========================================================================
10943 * FUNCTION   : get_num_overall_buffers
10944 *
10945 * DESCRIPTION: Estimate number of pending buffers across all requests.
10946 *
10947 * PARAMETERS : None
10948 *
10949 * RETURN     : Number of overall pending buffers
10950 *
10951 *==========================================================================*/
10952uint32_t PendingBuffersMap::get_num_overall_buffers()
10953{
10954    uint32_t sum_buffers = 0;
10955    for (auto &req : mPendingBuffersInRequest) {
10956        sum_buffers += req.mPendingBufferList.size();
10957    }
10958    return sum_buffers;
10959}
10960
10961/*===========================================================================
10962 * FUNCTION   : removeBuf
10963 *
10964 * DESCRIPTION: Remove a matching buffer from tracker.
10965 *
10966 * PARAMETERS : @buffer: image buffer for the callback
10967 *
10968 * RETURN     : None
10969 *
10970 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    // Find the first tracked occurrence of 'buffer' across all pending
    // requests and remove it; a request whose buffer list becomes empty
    // is removed from the tracker as well. Only one match is removed.
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                // Stop scanning immediately: the erases above invalidate
                // further iteration over these lists.
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
10997
10998/*===========================================================================
10999 * FUNCTION   : setPAAFSupport
11000 *
11001 * DESCRIPTION: Set the preview-assisted auto focus support bit in
11002 *              feature mask according to stream type and filter
11003 *              arrangement
11004 *
11005 * PARAMETERS : @feature_mask: current feature mask, which may be modified
11006 *              @stream_type: stream type
11007 *              @filter_arrangement: filter arrangement
11008 *
11009 * RETURN     : None
11010 *==========================================================================*/
11011void QCamera3HardwareInterface::setPAAFSupport(
11012        cam_feature_mask_t& feature_mask,
11013        cam_stream_type_t stream_type,
11014        cam_color_filter_arrangement_t filter_arrangement)
11015{
11016    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11017            feature_mask, stream_type, filter_arrangement);
11018
11019    switch (filter_arrangement) {
11020    case CAM_FILTER_ARRANGEMENT_RGGB:
11021    case CAM_FILTER_ARRANGEMENT_GRBG:
11022    case CAM_FILTER_ARRANGEMENT_GBRG:
11023    case CAM_FILTER_ARRANGEMENT_BGGR:
11024        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11025                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11026            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11027        }
11028        break;
11029    case CAM_FILTER_ARRANGEMENT_Y:
11030        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11031            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11032        }
11033        break;
11034    default:
11035        break;
11036    }
11037}
11038
11039/*===========================================================================
11040 * FUNCTION   : adjustBlackLevelForCFA
11041 *
11042 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11043 *              of bayer CFA (Color Filter Array).
11044 *
11045 * PARAMETERS : @input: black level pattern in the order of RGGB
11046 *              @output: black level pattern in the order of CFA
11047 *              @color_arrangement: CFA color arrangement
11048 *
11049 * RETURN     : None
11050 *==========================================================================*/
11051template<typename T>
11052void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11053        T input[BLACK_LEVEL_PATTERN_CNT],
11054        T output[BLACK_LEVEL_PATTERN_CNT],
11055        cam_color_filter_arrangement_t color_arrangement)
11056{
11057    switch (color_arrangement) {
11058    case CAM_FILTER_ARRANGEMENT_GRBG:
11059        output[0] = input[1];
11060        output[1] = input[0];
11061        output[2] = input[3];
11062        output[3] = input[2];
11063        break;
11064    case CAM_FILTER_ARRANGEMENT_GBRG:
11065        output[0] = input[2];
11066        output[1] = input[3];
11067        output[2] = input[0];
11068        output[3] = input[1];
11069        break;
11070    case CAM_FILTER_ARRANGEMENT_BGGR:
11071        output[0] = input[3];
11072        output[1] = input[2];
11073        output[2] = input[1];
11074        output[3] = input[0];
11075        break;
11076    case CAM_FILTER_ARRANGEMENT_RGGB:
11077        output[0] = input[0];
11078        output[1] = input[1];
11079        output[2] = input[2];
11080        output[3] = input[3];
11081        break;
11082    default:
11083        LOGE("Invalid color arrangement to derive dynamic blacklevel");
11084        break;
11085    }
11086}
11087}; //end namespace qcamera
11088