1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <time.h>
44#include <sync/sync.h>
45#include "gralloc_priv.h"
46
47// Display dependencies
48#include "qdMetaData.h"
49
50// Camera dependencies
51#include "android/QCamera3External.h"
52#include "util/QCameraFlash.h"
53#include "QCamera3HWI.h"
54#include "QCamera3VendorTags.h"
55#include "QCameraTrace.h"
56
57extern "C" {
58#include "mm_camera_dbg.h"
59}
60#include "cam_cond.h"
61
62using namespace android;
63
64namespace qcamera {
65
// Shorthand: fetch the mapped pointer at INDEX from a heap-memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline-depth constants (in frames) reported to / assumed by the framework.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture request.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Saturation values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream count limits.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Elements per metering/focus region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3 // seconds
// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Post-processing feature mask treated as the HAL3 "superset" of PP features.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel meaning "wait forever" where a timeout is expected.
#define TIMEOUT_NEVER -1

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

// Per-sensor capability tables; entries are read and patched in the
// constructor (mCommon.init, version, min_num_pp_bufs).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata blob per camera.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock guarding session globals (defined in another TU).
extern pthread_mutex_t gCamLock;
// HAL log verbosity; presumably refreshed by getLogLevel() — TODO confirm.
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions (defined in another TU).
extern uint8_t gNumCameraSessions;
118
// CDS mode: property string -> HAL CDS enum.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// android.control.effectMode <-> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode <-> HAL white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode <-> HAL scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
173
// android.control.afMode <-> HAL focus mode. ANDROID_CONTROL_AF_MODE_OFF is
// listed twice so that CAM_FOCUS_MODE_FIXED also translates back to AF OFF
// when mapping from HAL to Android.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// android.colorCorrection.aberrationMode <-> HAL CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// android.control.aeAntibandingMode <-> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> HAL flash mode implied by that AE mode
// (both plain ON and OFF imply flash OFF).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// android.flash.mode <-> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode <-> HAL face detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// android.lens.info.focusDistanceCalibration <-> HAL focus calibration.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// android.lens.state <-> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
249
// Supported JPEG thumbnail sizes as flattened (width, height) pairs; the
// leading (0, 0) entry denotes "no thumbnail" per the Android metadata
// contract for android.jpeg.availableThumbnailSizes.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// android.sensor.testPatternMode <-> HAL test pattern mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested video fps -> HAL HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
307
// HAL3 device-ops vtable handed to the framework through mCameraDevice.ops.
// Framework calls are dispatched to the static wrappers on
// QCamera3HardwareInterface; entries left NULL are not implemented by this HAL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Backend session id per camera, filled in openCamera() via get_session_id();
// initialise to some default value (0xDEADBEEF marks "not yet assigned").
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
322
323/*===========================================================================
324 * FUNCTION   : QCamera3HardwareInterface
325 *
326 * DESCRIPTION: constructor of QCamera3HardwareInterface
327 *
328 * PARAMETERS :
329 *   @cameraId  : camera ID
330 *
331 * RETURN     : none
332 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Populate the camera3_device_t the framework uses to reach this HAL.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built on demand; start with empty slots.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR toggles for preview and video streams, driven by persist properties.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Falls back to CAM_PAD_TO_64 stride padding unless the Adreno helper is
    // both enabled at compile time and resolvable at runtime.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
#endif
    m60HzZone = is60HzZone();
}
453
454/*===========================================================================
455 * FUNCTION   : ~QCamera3HardwareInterface
456 *
457 * DESCRIPTION: destructor of QCamera3HardwareInterface
458 *
459 * PARAMETERS : none
460 *
461 * RETURN     : none
462 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: everything is stopped, so it is now safe to delete the
    // channel objects and free the per-stream bookkeeping entries.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // NOTE(review): mPictureChannel is only cleared, not deleted, here —
    // presumably it is owned via mStreamInfo and freed in the loop above;
    // confirm before changing.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // An empty CAM_INTF_META_STREAM_INFO tells the backend to tear
            // down the previously configured streams.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drain all pending bookkeeping lists and free any per-request memory.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
585
586/*===========================================================================
587 * FUNCTION   : erasePendingRequest
588 *
589 * DESCRIPTION: function to erase a desired pending request after freeing any
590 *              allocated memory
591 *
592 * PARAMETERS :
593 *   @i       : iterator pointing to pending request to be erased
594 *
595 * RETURN     : iterator pointing to the next request
596 *==========================================================================*/
597QCamera3HardwareInterface::pendingRequestIterator
598        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
599{
600    if (i->input_buffer != NULL) {
601        free(i->input_buffer);
602        i->input_buffer = NULL;
603    }
604    if (i->settings != NULL)
605        free_camera_metadata((camera_metadata_t*)i->settings);
606    return mPendingRequestsList.erase(i);
607}
608
609/*===========================================================================
610 * FUNCTION   : camEvtHandle
611 *
612 * DESCRIPTION: Function registered to mm-camera-interface to handle events
613 *
614 * PARAMETERS :
615 *   @camera_handle : interface layer camera handle
616 *   @evt           : ptr to event
617 *   @user_data     : user data ptr
618 *
619 * RETURN     : none
620 *==========================================================================*/
621void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
622                                          mm_camera_event_t *evt,
623                                          void *user_data)
624{
625    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
626    if (obj && evt) {
627        switch(evt->server_event_type) {
628            case CAM_EVENT_TYPE_DAEMON_DIED:
629                pthread_mutex_lock(&obj->mMutex);
630                obj->mState = ERROR;
631                pthread_mutex_unlock(&obj->mMutex);
632                LOGE("Fatal, camera daemon died");
633                break;
634
635            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
636                LOGD("HAL got request pull from Daemon");
637                pthread_mutex_lock(&obj->mMutex);
638                obj->mWokenUpByDaemon = true;
639                obj->unblockRequestIfNecessary();
640                pthread_mutex_unlock(&obj->mMutex);
641                break;
642
643            default:
644                LOGW("Warning: Unhandled event %d",
645                        evt->server_event_type);
646                break;
647        }
648    } else {
649        LOGE("NULL user_data/evt");
650    }
651}
652
653/*===========================================================================
654 * FUNCTION   : openCamera
655 *
656 * DESCRIPTION: open camera
657 *
658 * PARAMETERS :
659 *   @hw_device  : double ptr for camera device struct
660 *
661 * RETURN     : int32_t type of status
662 *              NO_ERROR  -- success
663 *              none-zero failure code
664 *==========================================================================*/
665int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
666{
667    int rc = 0;
668    if (mState != CLOSED) {
669        *hw_device = NULL;
670        return PERMISSION_DENIED;
671    }
672
673    m_perfLock.lock_acq();
674    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
675             mCameraId);
676
677    rc = openCamera();
678    if (rc == 0) {
679        *hw_device = &mCameraDevice.common;
680    } else
681        *hw_device = NULL;
682
683    m_perfLock.lock_rel();
684    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
685             mCameraId, rc);
686
687    if (rc == NO_ERROR) {
688        mState = OPENED;
689    }
690    return rc;
691}
692
693/*===========================================================================
694 * FUNCTION   : openCamera
695 *
696 * DESCRIPTION: open camera
697 *
698 * PARAMETERS : none
699 *
700 * RETURN     : int32_t type of status
701 *              NO_ERROR  -- success
702 *              none-zero failure code
703 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CALL();
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Reserve the flash unit for this camera before opening the backend.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    // Scratch buffer for 3A debug data carried in JPEG EXIF.
    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        // NOTE(review): "sessiion" typo in this log string; left untouched to
        // keep existing log greps working.
        LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            // NOTE(review): m_pRelCamSyncHeap is not deleted on this path —
            // presumably reclaimed in closeCamera()/destructor; confirm.
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
                m_pRelCamSyncHeap->getFd(0),
                sizeof(cam_sync_related_sensors_event_info_t),
                m_pRelCamSyncHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            // NOTE(review): rc is set to FAILED_TRANSACTION but NO_MEMORY is
            // returned on the next line — the assignment is a dead store.
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        // Cache the mapped pointer used for dual-camera sync payloads.
        m_pRelCamSyncBuf =
                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}
808
809/*===========================================================================
810 * FUNCTION   : closeCamera
811 *
812 * DESCRIPTION: close camera
813 *
814 * PARAMETERS : none
815 *
816 * RETURN     : int32_t type of status
817 *              NO_ERROR  -- success
 *              non-zero failure code
819 *==========================================================================*/
820int QCamera3HardwareInterface::closeCamera()
821{
822    KPI_ATRACE_CALL();
823    int rc = NO_ERROR;
824    char value[PROPERTY_VALUE_MAX];
825
826    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
827             mCameraId);
828    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
829    mCameraHandle = NULL;
830
831    //reset session id to some invalid id
832    pthread_mutex_lock(&gCamLock);
833    sessionId[mCameraId] = 0xDEADBEEF;
834    pthread_mutex_unlock(&gCamLock);
835
836    //Notify display HAL that there is no active camera session
837    //but avoid calling the same during bootup. Refer to openCamera
838    //for more details.
839    property_get("service.bootanim.exit", value, "0");
840    if (atoi(value) == 1) {
841        pthread_mutex_lock(&gCamLock);
842        if (--gNumCameraSessions == 0) {
843            setCameraLaunchStatus(false);
844        }
845        pthread_mutex_unlock(&gCamLock);
846    }
847
848    if (NULL != m_pRelCamSyncHeap) {
849        m_pRelCamSyncHeap->deallocate();
850        delete m_pRelCamSyncHeap;
851        m_pRelCamSyncHeap = NULL;
852        m_pRelCamSyncBuf = NULL;
853    }
854
855    if (mExifParams.debug_params) {
856        free(mExifParams.debug_params);
857        mExifParams.debug_params = NULL;
858    }
859    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
860        LOGW("Failed to release flash for camera id: %d",
861                mCameraId);
862    }
863    mState = CLOSED;
864    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
865         mCameraId, rc);
866    return rc;
867}
868
869/*===========================================================================
870 * FUNCTION   : initialize
871 *
872 * DESCRIPTION: Initialize frameworks callback functions
873 *
874 * PARAMETERS :
875 *   @callback_ops : callback function to frameworks
876 *
877 * RETURN     :
878 *
879 *==========================================================================*/
880int QCamera3HardwareInterface::initialize(
881        const struct camera3_callback_ops *callback_ops)
882{
883    ATRACE_CALL();
884    int rc;
885
886    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
887    pthread_mutex_lock(&mMutex);
888
889    // Validate current state
890    switch (mState) {
891        case OPENED:
892            /* valid state */
893            break;
894        default:
895            LOGE("Invalid state %d", mState);
896            rc = -ENODEV;
897            goto err1;
898    }
899
900    rc = initParameters();
901    if (rc < 0) {
902        LOGE("initParamters failed %d", rc);
903        goto err1;
904    }
905    mCallbackOps = callback_ops;
906
907    mChannelHandle = mCameraHandle->ops->add_channel(
908            mCameraHandle->camera_handle, NULL, NULL, this);
909    if (mChannelHandle == 0) {
910        LOGE("add_channel failed");
911        rc = -ENOMEM;
912        pthread_mutex_unlock(&mMutex);
913        return rc;
914    }
915
916    pthread_mutex_unlock(&mMutex);
917    mCameraInitialized = true;
918    mState = INITIALIZED;
919    LOGI("X");
920    return 0;
921
922err1:
923    pthread_mutex_unlock(&mMutex);
924    return rc;
925}
926
927/*===========================================================================
928 * FUNCTION   : validateStreamDimensions
929 *
930 * DESCRIPTION: Check if the configuration requested are those advertised
931 *
932 * PARAMETERS :
933 *   @stream_list : streams to be configured
934 *
935 * RETURN     :
936 *
937 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // camera3 allows at most one input stream per configuration
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the capability tables must be checked
        // against the swapped (pre-rotation) dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams: validate against the advertised raw dimensions
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG streams: validate against the picture-size table
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // YUV / opaque / anything else: ZSL-style streams may match the
            // full active array size; all are also checked against the
            // picture-size table below.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    // NOTE: this break exits the switch, not the outer loop
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1049
1050/*===========================================================================
1051 * FUNCTION   : validateUsageFlags
1052 *
1053 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1054 *
1055 * PARAMETERS :
1056 *   @stream_list : streams to be configured
1057 *
1058 * RETURN     :
1059 *   NO_ERROR if the usage flags are supported
1060 *   error code if usage flags are not supported
1061 *
1062 *==========================================================================*/
1063int QCamera3HardwareInterface::validateUsageFlags(
1064        const camera3_stream_configuration_t* streamList)
1065{
1066    for (size_t j = 0; j < streamList->num_streams; j++) {
1067        const camera3_stream_t *newStream = streamList->streams[j];
1068
1069        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1070            (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1071             newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1072            continue;
1073        }
1074
1075        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1076        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1077        bool isZSL = IS_USAGE_ZSL(newStream->usage);
1078        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1079                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height);
1080        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1081                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height);
1082        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1083                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height);
1084
1085        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1086        // So color spaces will always match.
1087
1088        // Check whether underlying formats of shared streams match.
1089        if (isVideo && isPreview && videoFormat != previewFormat) {
1090            LOGE("Combined video and preview usage flag is not supported");
1091            return -EINVAL;
1092        }
1093        if (isPreview && isZSL && previewFormat != zslFormat) {
1094            LOGE("Combined preview and zsl usage flag is not supported");
1095            return -EINVAL;
1096        }
1097        if (isVideo && isZSL && videoFormat != zslFormat) {
1098            LOGE("Combined video and zsl usage flag is not supported");
1099            return -EINVAL;
1100        }
1101    }
1102    return NO_ERROR;
1103}
1104
1105/*===========================================================================
1106 * FUNCTION   : validateUsageFlagsForEis
1107 *
1108 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1109 *
1110 * PARAMETERS :
1111 *   @stream_list : streams to be configured
1112 *
1113 * RETURN     :
1114 *   NO_ERROR if the usage flags are supported
1115 *   error code if usage flags are not supported
1116 *
1117 *==========================================================================*/
1118int QCamera3HardwareInterface::validateUsageFlagsForEis(
1119        const camera3_stream_configuration_t* streamList)
1120{
1121    for (size_t j = 0; j < streamList->num_streams; j++) {
1122        const camera3_stream_t *newStream = streamList->streams[j];
1123
1124        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1125        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1126
1127        // Because EIS is "hard-coded" for certain use case, and current
1128        // implementation doesn't support shared preview and video on the same
1129        // stream, return failure if EIS is forced on.
1130        if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1131            LOGE("Combined video and preview usage flag is not supported due to EIS");
1132            return -EINVAL;
1133        }
1134    }
1135    return NO_ERROR;
1136}
1137
1138
1139/*==============================================================================
1140 * FUNCTION   : isSupportChannelNeeded
1141 *
1142 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1143 *
1144 * PARAMETERS :
1145 *   @stream_list : streams to be configured
1146 *   @stream_config_info : the config info for streams to be configured
1147 *
 * RETURN     : Boolean true/false decision
1149 *
1150 *==========================================================================*/
1151bool QCamera3HardwareInterface::isSupportChannelNeeded(
1152        camera3_stream_configuration_t *streamList,
1153        cam_stream_size_info_t stream_config_info)
1154{
1155    uint32_t i;
1156    bool pprocRequested = false;
1157    /* Check for conditions where PProc pipeline does not have any streams*/
1158    for (i = 0; i < stream_config_info.num_streams; i++) {
1159        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1160                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1161            pprocRequested = true;
1162            break;
1163        }
1164    }
1165
1166    if (pprocRequested == false )
1167        return true;
1168
1169    /* Dummy stream needed if only raw or jpeg streams present */
1170    for (i = 0; i < streamList->num_streams; i++) {
1171        switch(streamList->streams[i]->format) {
1172            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1173            case HAL_PIXEL_FORMAT_RAW10:
1174            case HAL_PIXEL_FORMAT_RAW16:
1175            case HAL_PIXEL_FORMAT_BLOB:
1176                break;
1177            default:
1178                return false;
1179        }
1180    }
1181    return true;
1182}
1183
1184/*==============================================================================
1185 * FUNCTION   : getSensorOutputSize
1186 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1188 *
1189 * PARAMETERS :
1190 *   @sensor_dim : sensor output dimension (output)
1191 *
1192 * RETURN     : int32_t type of status
1193 *              NO_ERROR  -- success
 *              non-zero failure code
1195 *
1196 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // Compute the bounding (max width x max height) dimension over all
    // configured streams; this drives the sensor output selection.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Push the max dimension to the backend first; the RAW dimension query
    // below depends on it having been applied.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Now query the resulting RAW (sensor output) dimension back from the
    // backend into the caller-provided sensor_dim.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);

    return rc;
}
1239
1240/*==============================================================================
1241 * FUNCTION   : enablePowerHint
1242 *
1243 * DESCRIPTION: enable single powerhint for preview and different video modes.
1244 *
1245 * PARAMETERS :
1246 *
1247 * RETURN     : NULL
1248 *
1249 *==========================================================================*/
1250void QCamera3HardwareInterface::enablePowerHint()
1251{
1252    if (!mPowerHintEnabled) {
1253        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1254        mPowerHintEnabled = true;
1255    }
1256}
1257
1258/*==============================================================================
1259 * FUNCTION   : disablePowerHint
1260 *
1261 * DESCRIPTION: disable current powerhint.
1262 *
1263 * PARAMETERS :
1264 *
1265 * RETURN     : NULL
1266 *
1267 *==========================================================================*/
1268void QCamera3HardwareInterface::disablePowerHint()
1269{
1270    if (mPowerHintEnabled) {
1271        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1272        mPowerHintEnabled = false;
1273    }
1274}
1275
1276/*==============================================================================
1277 * FUNCTION   : addToPPFeatureMask
1278 *
1279 * DESCRIPTION: add additional features to pp feature mask based on
1280 *              stream type and usecase
1281 *
1282 * PARAMETERS :
1283 *   @stream_format : stream type for feature mask
1284 *   @stream_idx : stream idx within postprocess_mask list to change
1285 *
1286 * RETURN     : NULL
1287 *
1288 *==========================================================================*/
1289void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1290        uint32_t stream_idx)
1291{
1292    char feature_mask_value[PROPERTY_VALUE_MAX];
1293    cam_feature_mask_t feature_mask;
1294    int args_converted;
1295    int property_len;
1296
1297    /* Get feature mask from property */
1298    property_len = property_get("persist.camera.hal3.feature",
1299            feature_mask_value, "0");
1300    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1301            (feature_mask_value[1] == 'x')) {
1302        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1303    } else {
1304        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1305    }
1306    if (1 != args_converted) {
1307        feature_mask = 0;
1308        LOGE("Wrong feature mask %s", feature_mask_value);
1309        return;
1310    }
1311
1312    switch (stream_format) {
1313    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1314        /* Add LLVD to pp feature mask only if video hint is enabled */
1315        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1316            mStreamConfigInfo.postprocess_mask[stream_idx]
1317                    |= CAM_QTI_FEATURE_SW_TNR;
1318            LOGH("Added SW TNR to pp feature mask");
1319        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1320            mStreamConfigInfo.postprocess_mask[stream_idx]
1321                    |= CAM_QCOM_FEATURE_LLVD;
1322            LOGH("Added LLVD SeeMore to pp feature mask");
1323        }
1324        break;
1325    }
1326    default:
1327        break;
1328    }
1329    LOGD("PP feature mask %llx",
1330            mStreamConfigInfo.postprocess_mask[stream_idx]);
1331}
1332
1333/*==============================================================================
1334 * FUNCTION   : updateFpsInPreviewBuffer
1335 *
1336 * DESCRIPTION: update FPS information in preview buffer.
1337 *
1338 * PARAMETERS :
1339 *   @metadata    : pointer to metadata buffer
1340 *   @frame_number: frame_number to look for in pending buffer list
1341 *
1342 * RETURN     : None
1343 *
1344 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            // Only buffers of the matching frame number that belong to a
            // preview stream are updated.
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                // Pull the max fps from the metadata (if present) and hand it
                // to the display via the gralloc private handle so the
                // refresh rate can follow the camera frame rate.
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                    CAM_INTF_PARM_FPS_RANGE, metadata) {
                    int32_t cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                        (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
1371
1372/*==============================================================================
1373 * FUNCTION   : updateTimeStampInPendingBuffers
1374 *
1375 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1376 *              of a frame number
1377 *
1378 * PARAMETERS :
 *   @frameNumber : frame number. Timestamp will be set on pending buffers of this frame number
1380 *   @timestamp   : timestamp to be set
1381 *
1382 * RETURN     : None
1383 *
1384 *==========================================================================*/
1385void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1386        uint32_t frameNumber, nsecs_t timestamp)
1387{
1388    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1389            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1390        if (req->frame_number != frameNumber)
1391            continue;
1392
1393        for (auto k = req->mPendingBufferList.begin();
1394                k != req->mPendingBufferList.end(); k++ ) {
1395            struct private_handle_t *priv_handle =
1396                    (struct private_handle_t *) (*(k->buffer));
1397            setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1398        }
1399    }
1400    return;
1401}
1402
1403/*===========================================================================
1404 * FUNCTION   : configureStreams
1405 *
1406 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1407 *              and output streams.
1408 *
1409 * PARAMETERS :
1410 *   @stream_list : streams to be configured
1411 *
1412 * RETURN     :
1413 *
1414 *==========================================================================*/
1415int QCamera3HardwareInterface::configureStreams(
1416        camera3_stream_configuration_t *streamList)
1417{
1418    ATRACE_CALL();
1419    int rc = 0;
1420
1421    // Acquire perfLock before configure streams
1422    m_perfLock.lock_acq();
1423    rc = configureStreamsPerfLocked(streamList);
1424    m_perfLock.lock_rel();
1425
1426    return rc;
1427}
1428
1429/*===========================================================================
1430 * FUNCTION   : configureStreamsPerfLocked
1431 *
1432 * DESCRIPTION: configureStreams while perfLock is held.
1433 *
1434 * PARAMETERS :
1435 *   @stream_list : streams to be configured
1436 *
1437 * RETURN     : int32_t type of status
1438 *              NO_ERROR  -- success
 *              non-zero failure code
1440 *==========================================================================*/
1441int QCamera3HardwareInterface::configureStreamsPerfLocked(
1442        camera3_stream_configuration_t *streamList)
1443{
1444    ATRACE_CALL();
1445    int rc = 0;
1446
1447    // Sanity check stream_list
1448    if (streamList == NULL) {
1449        LOGE("NULL stream configuration");
1450        return BAD_VALUE;
1451    }
1452    if (streamList->streams == NULL) {
1453        LOGE("NULL stream list");
1454        return BAD_VALUE;
1455    }
1456
1457    if (streamList->num_streams < 1) {
1458        LOGE("Bad number of streams requested: %d",
1459                streamList->num_streams);
1460        return BAD_VALUE;
1461    }
1462
1463    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1464        LOGE("Maximum number of streams %d exceeded: %d",
1465                MAX_NUM_STREAMS, streamList->num_streams);
1466        return BAD_VALUE;
1467    }
1468
1469    rc = validateUsageFlags(streamList);
1470    if (rc != NO_ERROR) {
1471        return rc;
1472    }
1473
1474    mOpMode = streamList->operation_mode;
1475    LOGD("mOpMode: %d", mOpMode);
1476
1477    /* first invalidate all the steams in the mStreamList
1478     * if they appear again, they will be validated */
1479    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1480            it != mStreamInfo.end(); it++) {
1481        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1482        if (channel) {
1483          channel->stop();
1484        }
1485        (*it)->status = INVALID;
1486    }
1487
1488    if (mRawDumpChannel) {
1489        mRawDumpChannel->stop();
1490        delete mRawDumpChannel;
1491        mRawDumpChannel = NULL;
1492    }
1493
1494    if (mSupportChannel)
1495        mSupportChannel->stop();
1496
1497    if (mAnalysisChannel) {
1498        mAnalysisChannel->stop();
1499    }
1500    if (mMetadataChannel) {
1501        /* If content of mStreamInfo is not 0, there is metadata stream */
1502        mMetadataChannel->stop();
1503    }
1504    if (mChannelHandle) {
1505        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1506                mChannelHandle);
1507        LOGD("stopping channel %d", mChannelHandle);
1508    }
1509
1510    pthread_mutex_lock(&mMutex);
1511
1512    // Check state
1513    switch (mState) {
1514        case INITIALIZED:
1515        case CONFIGURED:
1516        case STARTED:
1517            /* valid state */
1518            break;
1519        default:
1520            LOGE("Invalid state %d", mState);
1521            pthread_mutex_unlock(&mMutex);
1522            return -ENODEV;
1523    }
1524
1525    /* Check whether we have video stream */
1526    m_bIs4KVideo = false;
1527    m_bIsVideo = false;
1528    m_bEisSupportedSize = false;
1529    m_bTnrEnabled = false;
1530    bool isZsl = false;
1531    bool isPreview = false;
1532    uint32_t videoWidth = 0U;
1533    uint32_t videoHeight = 0U;
1534    size_t rawStreamCnt = 0;
1535    size_t stallStreamCnt = 0;
1536    size_t processedStreamCnt = 0;
1537    // Number of streams on ISP encoder path
1538    size_t numStreamsOnEncoder = 0;
1539    size_t numYuv888OnEncoder = 0;
1540    bool bYuv888OverrideJpeg = false;
1541    cam_dimension_t largeYuv888Size = {0, 0};
1542    cam_dimension_t maxViewfinderSize = {0, 0};
1543    bool bJpegExceeds4K = false;
1544    bool bJpegOnEncoder = false;
1545    bool bUseCommonFeatureMask = false;
1546    cam_feature_mask_t commonFeatureMask = 0;
1547    bool bSmallJpegSize = false;
1548    uint32_t width_ratio;
1549    uint32_t height_ratio;
1550    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1551    camera3_stream_t *inputStream = NULL;
1552    bool isJpeg = false;
1553    cam_dimension_t jpegSize = {0, 0};
1554
1555    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1556
1557    /*EIS configuration*/
1558    bool eisSupported = false;
1559    bool oisSupported = false;
1560    int32_t margin_index = -1;
1561    uint8_t eis_prop_set;
1562    uint32_t maxEisWidth = 0;
1563    uint32_t maxEisHeight = 0;
1564
1565    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1566
1567    size_t count = IS_TYPE_MAX;
1568    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1569    for (size_t i = 0; i < count; i++) {
1570        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1571            eisSupported = true;
1572            margin_index = (int32_t)i;
1573            break;
1574        }
1575    }
1576
1577    count = CAM_OPT_STAB_MAX;
1578    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1579    for (size_t i = 0; i < count; i++) {
1580        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1581            oisSupported = true;
1582            break;
1583        }
1584    }
1585
1586    if (eisSupported) {
1587        maxEisWidth = MAX_EIS_WIDTH;
1588        maxEisHeight = MAX_EIS_HEIGHT;
1589    }
1590
1591    /* EIS setprop control */
1592    char eis_prop[PROPERTY_VALUE_MAX];
1593    memset(eis_prop, 0, sizeof(eis_prop));
1594    property_get("persist.camera.eis.enable", eis_prop, "0");
1595    eis_prop_set = (uint8_t)atoi(eis_prop);
1596
1597    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1598            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1599
    /* stream configurations */
    // First pass over the requested streams: classify each one and gather
    // the counters/flags (stall, raw, processed, encoder-path counts and
    // ZSL/JPEG/video indicators) used to validate and size the pipeline below.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        LOGI("stream[%d] type = %d, format = %d, width = %d, "
                "height = %d, rotation = %d, usage = 0x%x",
                 i, newStream->stream_type, newStream->format,
                newStream->width, newStream->height, newStream->rotation,
                newStream->usage);
        // Input or bidirectional streams imply a ZSL/reprocess use case.
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
                newStream->stream_type == CAMERA3_STREAM_INPUT){
            isZsl = true;
        }
        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
                IS_USAGE_PREVIEW(newStream->usage)) {
            isPreview = true;
        }

        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
            inputStream = newStream;
        }

        // BLOB == JPEG output; remember its size and whether it exceeds 4K.
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            isJpeg = true;
            jpegSize.width = newStream->width;
            jpegSize.height = newStream->height;
            if (newStream->width > VIDEO_4K_WIDTH ||
                    newStream->height > VIDEO_4K_HEIGHT)
                bJpegExceeds4K = true;
        }

        // Implementation-defined + video-encoder usage == recording stream.
        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
            m_bIsVideo = true;
            videoWidth = newStream->width;
            videoHeight = newStream->height;
            if ((VIDEO_4K_WIDTH <= newStream->width) &&
                    (VIDEO_4K_HEIGHT <= newStream->height)) {
                m_bIs4KVideo = true;
            }
            // NOTE(review): when multiple video streams are configured this
            // flag (and videoWidth/videoHeight above) reflects the LAST one
            // seen — confirm that is the intended behavior.
            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
                                  (newStream->height <= maxEisHeight);

        }
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_BLOB:
                stallStreamCnt++;
                if (isOnEncoder(maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    numStreamsOnEncoder++;
                    bJpegOnEncoder = true;
                }
                // Ratio of the active pixel array to the requested JPEG size.
                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
                        newStream->width);
                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
                        newStream->height);;
                // A zero max_downscale_factor means a corrupt capability
                // table; abort rather than misconfigure.
                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
                        "FATAL: max_downscale_factor cannot be zero and so assert");
                // JPEG smaller than the ISP can downscale to in one pass:
                // flag it so post-processing is routed through PP (see the
                // BLOB handling in the channel-allocation loop below).
                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
                    LOGH("Setting small jpeg size flag to true");
                    bSmallJpegSize = true;
                }
                break;
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
                rawStreamCnt++;
                break;
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                processedStreamCnt++;
                if (isOnEncoder(maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    // ZSL/bidirectional streams are excluded from the common
                    // post-processing superset mask.
                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
                            !IS_USAGE_ZSL(newStream->usage)) {
                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    numStreamsOnEncoder++;
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                processedStreamCnt++;
                if (isOnEncoder(maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    // If Yuv888 size is not greater than 4K, set feature mask
                    // to SUPERSET so that it support concurrent request on
                    // YUV and JPEG.
                    if (newStream->width <= VIDEO_4K_WIDTH &&
                            newStream->height <= VIDEO_4K_HEIGHT) {
                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    }
                    numStreamsOnEncoder++;
                    numYuv888OnEncoder++;
                    largeYuv888Size.width = newStream->width;
                    largeYuv888Size.height = newStream->height;
                }
                break;
            default:
                processedStreamCnt++;
                if (isOnEncoder(maxViewfinderSize, newStream->width,
                        newStream->height)) {
                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                    numStreamsOnEncoder++;
                }
                break;
            }

        }
    }

    // EIS is only applied for back-camera video use cases: disable it for
    // the front camera and for non-video configurations.
    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
        !m_bIsVideo) {
        m_bEisEnable = false;
    }
1715
    // Reject usage-flag combinations incompatible with EIS. mMutex is held
    // by this function; every error return must unlock it first.
    if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Logic to enable/disable TNR based on specific config size/etc.*/
    // TNR only for 1080p/720p video, and never in constrained high-speed mode.
    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
            ((videoWidth == 1920 && videoHeight == 1080) ||
            (videoWidth == 1280 && videoHeight == 720)) &&
            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
        m_bTnrEnabled = true;

    /* Check if num_streams is sane */
    if (stallStreamCnt > MAX_STALLING_STREAMS ||
            rawStreamCnt > MAX_RAW_STREAMS ||
            processedStreamCnt > MAX_PROCESSED_STREAMS) {
        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Check whether we have zsl stream or 4k video case */
    if (isZsl && m_bIsVideo) {
        LOGE("Currently invalid configuration ZSL&Video!");
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Check if stream sizes are sane */
    if (numStreamsOnEncoder > 2) {
        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    } else if (1 < numStreamsOnEncoder){
        // Exactly 2 encoder-path streams: they must share one h/w stream,
        // so a common post-processing feature mask is required.
        bUseCommonFeatureMask = true;
        LOGH("Multiple streams above max viewfinder size, common mask needed");
    }

    /* Check if BLOB size is greater than 4k in 4k recording case */
    if (m_bIs4KVideo && bJpegExceeds4K) {
        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
    // configurations:
    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
                __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
    // the YUV stream's size is greater or equal to the JPEG size, set common
    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
    // NOTE(review): the comment above says "greater or equal" but the code
    // below uses strict '>' comparisons — confirm which is intended.
    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
            jpegSize.width, jpegSize.height) &&
            largeYuv888Size.width > jpegSize.width &&
            largeYuv888Size.height > jpegSize.height) {
        bYuv888OverrideJpeg = true;
    } else if (!isJpeg && numStreamsOnEncoder > 1) {
        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    }

    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
            commonFeatureMask);
    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);

    // Per-stream dimension and rotation validation.
    rc = validateStreamDimensions(streamList);
    if (rc == NO_ERROR) {
        rc = validateStreamRotations(streamList);
    }
    if (rc != NO_ERROR) {
        LOGE("Invalid stream configuration requested!");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
1801
    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
    camera3_stream_t *jpegStream = NULL;
    // Second pass: reconcile the requested streams with mStreamInfo
    // (tear down channels of re-registered streams, record new ones) and
    // identify the ZSL and JPEG streams.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        LOGH("newStream type = %d, stream format = %d "
                "stream size : %d x %d, stream rotation = %d",
                 newStream->stream_type, newStream->format,
                newStream->width, newStream->height, newStream->rotation);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3ProcessingChannel *channel =
                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
                stream_exists = true;
                // Existing stream being re-configured: delete its old
                // channel; a new one is constructed later in this function.
                if (channel)
                    delete channel;
                (*it)->status = VALID;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
            //new stream
            // malloc'ed record; ownership moves into mStreamInfo.
            // NOTE(review): presumably freed in cleanAndSortStreamInfo() or
            // teardown — confirm against the rest of the file.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            if (!stream_info) {
               LOGE("Could not allocate stream info");
               rc = -ENOMEM;
               pthread_mutex_unlock(&mMutex);
               return rc;
            }
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        /* Covers Opaque ZSL and API1 F/W ZSL */
        if (IS_USAGE_ZSL(newStream->usage)
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (zslStream != NULL) {
                LOGE("Multiple input/reprocess streams requested!");
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            zslStream = newStream;
        }
        /* Covers YUV reprocess */
        // A YUV output that exactly matches the YUV input stream's size is
        // treated as the ZSL (reprocess) stream.
        if (inputStream != NULL) {
            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
                    && inputStream->width == newStream->width
                    && inputStream->height == newStream->height) {
                if (zslStream != NULL) {
                    /* This scenario indicates multiple YUV streams with same size
                     * as input stream have been requested, since zsl stream handle
                     * is solely use for the purpose of overriding the size of streams
                     * which share h/w streams we will just make a guess here as to
                     * which of the stream is a ZSL stream, this will be refactored
                     * once we make generic logic for streams sharing encoder output
                     */
                    LOGH("Warning, Multiple ip/reprocess streams requested!");
                }
                zslStream = newStream;
            }
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }

    /* If a zsl stream is set, we know that we have configured at least one input or
       bidirectional stream */
    if (NULL != zslStream) {
        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
        mInputStreamInfo.format = zslStream->format;
        mInputStreamInfo.usage = zslStream->usage;
        LOGD("Input stream configured! %d x %d, format %d, usage %d",
                 mInputStreamInfo.dim.width,
                mInputStreamInfo.dim.height,
                mInputStreamInfo.format, mInputStreamInfo.usage);
    }
1887
    cleanAndSortStreamInfo();
    // Tear down the internal channels from any previous configuration;
    // they are recreated below for the new stream set.
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }

    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    //Create metadata channel and initialize it
    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
            gCamCapability[mCameraId]->color_arrangement);
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mChannelHandle, mCameraHandle->ops, captureResultCb,
                    setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
    if (mMetadataChannel == NULL) {
        LOGE("failed to allocate metadata channel");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    // Metadata never uses image stabilization.
    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
    if (rc < 0) {
        LOGE("metadata channel initialization failed");
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
1929
    // Create analysis stream all the time, even when h/w support is not available
    {
        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        cam_analysis_info_t analysisInfo;
        rc = mCommon.getAnalysisInfo(
                FALSE,
                TRUE,
                analysisFeatureMask,
                &analysisInfo);
        if (rc != NO_ERROR) {
            LOGE("getAnalysisInfo failed, ret = %d", rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }

        // Y-only analysis formats use a luma-only color filter arrangement;
        // otherwise fall back to the sensor's native arrangement.
        cam_color_filter_arrangement_t analysis_color_arrangement =
                (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
                CAM_FILTER_ARRANGEMENT_Y :
                gCamCapability[mCameraId]->color_arrangement);
        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
                analysis_color_arrangement);

        mAnalysisChannel = new QCamera3SupportChannel(
                mCameraHandle->camera_handle,
                mChannelHandle,
                mCameraHandle->ops,
                &analysisInfo.analysis_padding_info,
                analysisFeatureMask,
                CAM_STREAM_TYPE_ANALYSIS,
                &analysisInfo.analysis_max_res,
                (analysisInfo.analysis_format
                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
                : CAM_FORMAT_YUV_420_NV21),
                analysisInfo.hw_analysis_supported,
                this,
                0); // force buffer count to 0
        if (!mAnalysisChannel) {
            LOGE("H/W Analysis channel cannot be created");
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }
1972
    bool isRawStreamRequested = false;
    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
    /* Allocate channel objects for the requested streams */
    // Third pass: fill mStreamConfigInfo (size, stream type, postprocess
    // mask) for each requested stream, then construct its channel.
    // NOTE(review): mStreamConfigInfo.num_streams indexes the slot for the
    // current stream; it is presumably advanced once per iteration later in
    // this loop (beyond this section) — confirm.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
        // Opaque-ZSL / bidirectional implementation-defined streams are
        // treated as snapshot streams.
        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                || IS_USAGE_ZSL(newStream->usage)) &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
            if (bUseCommonFeatureMask) {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        commonFeatureMask;
            } else {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        CAM_QCOM_FEATURE_NONE;
            }

        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
                LOGH("Input stream configured, reprocess config");
        } else {
            //for non zsl streams find out the format
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
            {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                /* add additional features to pp feature mask */
                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
                        mStreamConfigInfo.num_streams);

                // Encoder usage -> video stream, otherwise preview.
                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                                CAM_STREAM_TYPE_VIDEO;
                    if (m_bTnrEnabled && m_bTnrVideo) {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
                            CAM_QCOM_FEATURE_CPP_TNR;
                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
                                ~CAM_QCOM_FEATURE_CDS;
                    }
                } else {
                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                            CAM_STREAM_TYPE_PREVIEW;
                    if (m_bTnrEnabled && m_bTnrPreview) {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
                                CAM_QCOM_FEATURE_CPP_TNR;
                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
                                ~CAM_QCOM_FEATURE_CDS;
                    }
                    // Preview uses the display's stride padding requirement.
                    padding_info.width_padding = mSurfaceStridePadding;
                    padding_info.height_padding = CAM_PAD_TO_2;
                }
                // For 90/270 rotation the backend works on the swapped
                // (pre-rotation) dimensions.
                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                            newStream->height;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                            newStream->width;
                }
            }
            break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
                    if (bUseCommonFeatureMask)
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                                commonFeatureMask;
                    else
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                                CAM_QCOM_FEATURE_NONE;
                } else {
                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                }
            break;
            case HAL_PIXEL_FORMAT_BLOB:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                } else {
                    if (bUseCommonFeatureMask &&
                            isOnEncoder(maxViewfinderSize, newStream->width,
                            newStream->height)) {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
                    } else {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                    }
                }
                // Override the configured JPEG size when it is sourced from
                // ZSL, 4K video, or a larger YUV888 stream.
                if (isZsl) {
                    if (zslStream) {
                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                                (int32_t)zslStream->width;
                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                                (int32_t)zslStream->height;
                    } else {
                        LOGE("Error, No ZSL stream identified");
                        pthread_mutex_unlock(&mMutex);
                        return -EINVAL;
                    }
                } else if (m_bIs4KVideo) {
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
                } else if (bYuv888OverrideJpeg) {
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                            (int32_t)largeYuv888Size.width;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                            (int32_t)largeYuv888Size.height;
                }
                break;
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_RAW10:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                isRawStreamRequested = true;
                break;
            default:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                break;
            }
        }
2101
        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                gCamCapability[mCameraId]->color_arrangement);

        // priv == NULL means this stream has no channel yet (new stream or
        // re-registered above): set gralloc usage bits, then build a channel.
        if (newStream->priv == NULL) {
            //New stream, construct channel
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage |=
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else if (IS_USAGE_ZSL(newStream->usage))
                {
                    // Caller already set the ZSL usage bits; nothing to add.
                    LOGD("ZSL usage flag skipping");
                }
                else if (newStream == zslStream
                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
                } else
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                LOGE("Invalid stream_type %d", newStream->stream_type);
                break;
            }
2139
            // Construct the per-stream channel object and publish it via
            // newStream->priv; max_buffers tells the framework how many
            // buffers may be in flight on this stream.
            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3ProcessingChannel *channel = NULL;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                    // HFR video: regular channel with zero internal heap
                    // buffers and a larger in-flight budget.
                    if ((newStream->usage &
                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
                            (streamList->operation_mode ==
                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
                    ) {
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                0); //heap buffers are not required for HFR video channel
                        if (channel == NULL) {
                            LOGE("allocation of channel failed");
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        //channel->getNumBuffers() will return 0 here so use
                        //MAX_INFLIGH_HFR_REQUESTS
                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
                        newStream->priv = channel;
                        LOGI("num video buffers in HFR mode: %d",
                                 MAX_INFLIGHT_HFR_REQUESTS);
                    } else {
                        /* Copy stream contents in HFR preview only case to create
                         * dummy batch channel so that sensor streaming is in
                         * HFR mode */
                        if (!m_bIsVideo && (streamList->operation_mode ==
                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
                            mDummyBatchStream = *newStream;
                        }
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                MAX_INFLIGHT_REQUESTS);
                        if (channel == NULL) {
                            LOGE("allocation of channel failed");
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
                        newStream->priv = channel;
                    }
                    break;
                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
                            mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info,
                            this,
                            newStream,
                            (cam_stream_type_t)
                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            mMetadataChannel);
                    if (channel == NULL) {
                        LOGE("allocation of YUV channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = channel->getNumBuffers();
                    newStream->priv = channel;
                    break;
                }
                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                case HAL_PIXEL_FORMAT_RAW16:
                case HAL_PIXEL_FORMAT_RAW10:
                    // Last flag selects RAW16 vs opaque/RAW10 dump handling.
                    mRawChannel = new QCamera3RawChannel(
                            mCameraHandle->camera_handle, mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info,
                            this, newStream,
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            mMetadataChannel,
                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                    if (mRawChannel == NULL) {
                        LOGE("allocation of raw channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = mRawChannel->getNumBuffers();
                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
                    break;
2237                case HAL_PIXEL_FORMAT_BLOB:
2238                    // Max live snapshot inflight buffer is 1. This is to mitigate
2239                    // frame drop issues for video snapshot. The more buffers being
2240                    // allocated, the more frame drops there are.
2241                    mPictureChannel = new QCamera3PicChannel(
2242                            mCameraHandle->camera_handle, mChannelHandle,
2243                            mCameraHandle->ops, captureResultCb,
2244                            setBufferErrorStatus, &padding_info, this, newStream,
2245                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2246                            m_bIs4KVideo, isZsl, mMetadataChannel,
2247                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2248                    if (mPictureChannel == NULL) {
2249                        LOGE("allocation of channel failed");
2250                        pthread_mutex_unlock(&mMutex);
2251                        return -ENOMEM;
2252                    }
2253                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2254                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2255                    mPictureChannel->overrideYuvSize(
2256                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2257                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2258                    break;
2259
2260                default:
2261                    LOGE("not a supported format 0x%x", newStream->format);
2262                    pthread_mutex_unlock(&mMutex);
2263                    return -EINVAL;
2264                }
2265            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2266                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2267            } else {
2268                LOGE("Error, Unknown stream type");
2269                pthread_mutex_unlock(&mMutex);
2270                return -EINVAL;
2271            }
2272
2273            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2274            if (channel != NULL && channel->isUBWCEnabled()) {
2275                cam_format_t fmt = channel->getStreamDefaultFormat(
2276                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2277                        newStream->width, newStream->height);
2278                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2279                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2280                }
2281            }
2282
2283            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2284                    it != mStreamInfo.end(); it++) {
2285                if ((*it)->stream == newStream) {
2286                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2287                    break;
2288                }
2289            }
2290        } else {
2291            // Channel already exists for this stream
2292            // Do nothing for now
2293        }
2294        padding_info = gCamCapability[mCameraId]->padding_info;
2295
2296        /* Do not add entries for input stream in metastream info
2297         * since there is no real stream associated with it
2298         */
2299        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2300            mStreamConfigInfo.num_streams++;
2301    }
2302
2303    //RAW DUMP channel
2304    if (mEnableRawDump && isRawStreamRequested == false){
2305        cam_dimension_t rawDumpSize;
2306        rawDumpSize = getMaxRawSize(mCameraId);
2307        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2308        setPAAFSupport(rawDumpFeatureMask,
2309                CAM_STREAM_TYPE_RAW,
2310                gCamCapability[mCameraId]->color_arrangement);
2311        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2312                                  mChannelHandle,
2313                                  mCameraHandle->ops,
2314                                  rawDumpSize,
2315                                  &padding_info,
2316                                  this, rawDumpFeatureMask);
2317        if (!mRawDumpChannel) {
2318            LOGE("Raw Dump channel cannot be created");
2319            pthread_mutex_unlock(&mMutex);
2320            return -ENOMEM;
2321        }
2322    }
2323
2324
2325    if (mAnalysisChannel) {
2326        cam_analysis_info_t analysisInfo;
2327        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2328        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2329                CAM_STREAM_TYPE_ANALYSIS;
2330        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2331                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2332        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2333                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2334                &analysisInfo);
2335        if (rc != NO_ERROR) {
2336            LOGE("getAnalysisInfo failed, ret = %d", rc);
2337            pthread_mutex_unlock(&mMutex);
2338            return rc;
2339        }
2340        cam_color_filter_arrangement_t analysis_color_arrangement =
2341                (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2342                CAM_FILTER_ARRANGEMENT_Y :
2343                gCamCapability[mCameraId]->color_arrangement);
2344        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2345                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2346                analysis_color_arrangement);
2347
2348        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2349                analysisInfo.analysis_max_res;
2350        mStreamConfigInfo.num_streams++;
2351    }
2352
2353    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2354        cam_analysis_info_t supportInfo;
2355        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2356        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2357        setPAAFSupport(callbackFeatureMask,
2358                CAM_STREAM_TYPE_CALLBACK,
2359                gCamCapability[mCameraId]->color_arrangement);
2360        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2361        if (rc != NO_ERROR) {
2362            LOGE("getAnalysisInfo failed, ret = %d", rc);
2363            pthread_mutex_unlock(&mMutex);
2364            return rc;
2365        }
2366        mSupportChannel = new QCamera3SupportChannel(
2367                mCameraHandle->camera_handle,
2368                mChannelHandle,
2369                mCameraHandle->ops,
2370                &gCamCapability[mCameraId]->padding_info,
2371                callbackFeatureMask,
2372                CAM_STREAM_TYPE_CALLBACK,
2373                &QCamera3SupportChannel::kDim,
2374                CAM_FORMAT_YUV_420_NV21,
2375                supportInfo.hw_analysis_supported,
2376                this, 0);
2377        if (!mSupportChannel) {
2378            LOGE("dummy channel cannot be created");
2379            pthread_mutex_unlock(&mMutex);
2380            return -ENOMEM;
2381        }
2382    }
2383
2384    if (mSupportChannel) {
2385        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2386                QCamera3SupportChannel::kDim;
2387        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2388                CAM_STREAM_TYPE_CALLBACK;
2389        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2390                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2391        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2392                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2393                gCamCapability[mCameraId]->color_arrangement);
2394        mStreamConfigInfo.num_streams++;
2395    }
2396
2397    if (mRawDumpChannel) {
2398        cam_dimension_t rawSize;
2399        rawSize = getMaxRawSize(mCameraId);
2400        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2401                rawSize;
2402        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2403                CAM_STREAM_TYPE_RAW;
2404        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2405                CAM_QCOM_FEATURE_NONE;
2406        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2407                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2408                gCamCapability[mCameraId]->color_arrangement);
2409        mStreamConfigInfo.num_streams++;
2410    }
2411    /* In HFR mode, if video stream is not added, create a dummy channel so that
2412     * ISP can create a batch mode even for preview only case. This channel is
2413     * never 'start'ed (no stream-on), it is only 'initialized'  */
2414    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2415            !m_bIsVideo) {
2416        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2417        setPAAFSupport(dummyFeatureMask,
2418                CAM_STREAM_TYPE_VIDEO,
2419                gCamCapability[mCameraId]->color_arrangement);
2420        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2421                mChannelHandle,
2422                mCameraHandle->ops, captureResultCb,
2423                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
2424                this,
2425                &mDummyBatchStream,
2426                CAM_STREAM_TYPE_VIDEO,
2427                dummyFeatureMask,
2428                mMetadataChannel);
2429        if (NULL == mDummyBatchChannel) {
2430            LOGE("creation of mDummyBatchChannel failed."
2431                    "Preview will use non-hfr sensor mode ");
2432        }
2433    }
2434    if (mDummyBatchChannel) {
2435        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2436                mDummyBatchStream.width;
2437        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2438                mDummyBatchStream.height;
2439        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2440                CAM_STREAM_TYPE_VIDEO;
2441        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2442                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2443        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2444                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2445                gCamCapability[mCameraId]->color_arrangement);
2446        mStreamConfigInfo.num_streams++;
2447    }
2448
2449    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2450    mStreamConfigInfo.buffer_info.max_buffers =
2451            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2452
2453    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2454    for (pendingRequestIterator i = mPendingRequestsList.begin();
2455            i != mPendingRequestsList.end();) {
2456        i = erasePendingRequest(i);
2457    }
2458    mPendingFrameDropList.clear();
2459    // Initialize/Reset the pending buffers list
2460    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2461        req.mPendingBufferList.clear();
2462    }
2463    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2464
2465    mPendingReprocessResultList.clear();
2466
2467    mCurJpegMeta.clear();
2468    //Get min frame duration for this streams configuration
2469    deriveMinFrameDuration();
2470
2471    // Update state
2472    mState = CONFIGURED;
2473
2474    pthread_mutex_unlock(&mMutex);
2475
2476    return rc;
2477}
2478
2479/*===========================================================================
2480 * FUNCTION   : validateCaptureRequest
2481 *
2482 * DESCRIPTION: validate a capture request from camera service
2483 *
2484 * PARAMETERS :
2485 *   @request : request from framework to process
2486 *
2487 * RETURN     :
2488 *
2489 *==========================================================================*/
2490int QCamera3HardwareInterface::validateCaptureRequest(
2491                    camera3_capture_request_t *request)
2492{
2493    ssize_t idx = 0;
2494    const camera3_stream_buffer_t *b;
2495    CameraMetadata meta;
2496
2497    /* Sanity check the request */
2498    if (request == NULL) {
2499        LOGE("NULL capture request");
2500        return BAD_VALUE;
2501    }
2502
2503    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2504        /*settings cannot be null for the first request*/
2505        return BAD_VALUE;
2506    }
2507
2508    uint32_t frameNumber = request->frame_number;
2509    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2510        LOGE("Request %d: No output buffers provided!",
2511                __FUNCTION__, frameNumber);
2512        return BAD_VALUE;
2513    }
2514    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2515        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2516                 request->num_output_buffers, MAX_NUM_STREAMS);
2517        return BAD_VALUE;
2518    }
2519    if (request->input_buffer != NULL) {
2520        b = request->input_buffer;
2521        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2522            LOGE("Request %d: Buffer %ld: Status not OK!",
2523                     frameNumber, (long)idx);
2524            return BAD_VALUE;
2525        }
2526        if (b->release_fence != -1) {
2527            LOGE("Request %d: Buffer %ld: Has a release fence!",
2528                     frameNumber, (long)idx);
2529            return BAD_VALUE;
2530        }
2531        if (b->buffer == NULL) {
2532            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2533                     frameNumber, (long)idx);
2534            return BAD_VALUE;
2535        }
2536    }
2537
2538    // Validate all buffers
2539    b = request->output_buffers;
2540    do {
2541        QCamera3ProcessingChannel *channel =
2542                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2543        if (channel == NULL) {
2544            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2545                     frameNumber, (long)idx);
2546            return BAD_VALUE;
2547        }
2548        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2549            LOGE("Request %d: Buffer %ld: Status not OK!",
2550                     frameNumber, (long)idx);
2551            return BAD_VALUE;
2552        }
2553        if (b->release_fence != -1) {
2554            LOGE("Request %d: Buffer %ld: Has a release fence!",
2555                     frameNumber, (long)idx);
2556            return BAD_VALUE;
2557        }
2558        if (b->buffer == NULL) {
2559            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2560                     frameNumber, (long)idx);
2561            return BAD_VALUE;
2562        }
2563        if (*(b->buffer) == NULL) {
2564            LOGE("Request %d: Buffer %ld: NULL private handle!",
2565                     frameNumber, (long)idx);
2566            return BAD_VALUE;
2567        }
2568        idx++;
2569        b = request->output_buffers + idx;
2570    } while (idx < (ssize_t)request->num_output_buffers);
2571
2572    return NO_ERROR;
2573}
2574
2575/*===========================================================================
2576 * FUNCTION   : deriveMinFrameDuration
2577 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2579 *              on currently configured streams.
2580 *
2581 * PARAMETERS : NONE
2582 *
2583 * RETURN     : NONE
2584 *
2585 *==========================================================================*/
2586void QCamera3HardwareInterface::deriveMinFrameDuration()
2587{
2588    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2589
2590    maxJpegDim = 0;
2591    maxProcessedDim = 0;
2592    maxRawDim = 0;
2593
2594    // Figure out maximum jpeg, processed, and raw dimensions
2595    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2596        it != mStreamInfo.end(); it++) {
2597
2598        // Input stream doesn't have valid stream_type
2599        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2600            continue;
2601
2602        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2603        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2604            if (dimension > maxJpegDim)
2605                maxJpegDim = dimension;
2606        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2607                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2608                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2609            if (dimension > maxRawDim)
2610                maxRawDim = dimension;
2611        } else {
2612            if (dimension > maxProcessedDim)
2613                maxProcessedDim = dimension;
2614        }
2615    }
2616
2617    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2618            MAX_SIZES_CNT);
2619
2620    //Assume all jpeg dimensions are in processed dimensions.
2621    if (maxJpegDim > maxProcessedDim)
2622        maxProcessedDim = maxJpegDim;
2623    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2624    if (maxProcessedDim > maxRawDim) {
2625        maxRawDim = INT32_MAX;
2626
2627        for (size_t i = 0; i < count; i++) {
2628            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2629                    gCamCapability[mCameraId]->raw_dim[i].height;
2630            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2631                maxRawDim = dimension;
2632        }
2633    }
2634
2635    //Find minimum durations for processed, jpeg, and raw
2636    for (size_t i = 0; i < count; i++) {
2637        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2638                gCamCapability[mCameraId]->raw_dim[i].height) {
2639            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2640            break;
2641        }
2642    }
2643    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2644    for (size_t i = 0; i < count; i++) {
2645        if (maxProcessedDim ==
2646                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2647                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2648            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2649            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2650            break;
2651        }
2652    }
2653}
2654
2655/*===========================================================================
2656 * FUNCTION   : getMinFrameDuration
2657 *
 * DESCRIPTION: get minimum frame duration based on the currently configured
 *              minimum frame durations and the current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2664 *
2665 *==========================================================================*/
2666int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2667{
2668    bool hasJpegStream = false;
2669    bool hasRawStream = false;
2670    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2671        const camera3_stream_t *stream = request->output_buffers[i].stream;
2672        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2673            hasJpegStream = true;
2674        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2675                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2676                stream->format == HAL_PIXEL_FORMAT_RAW16)
2677            hasRawStream = true;
2678    }
2679
2680    if (!hasJpegStream)
2681        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2682    else
2683        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2684}
2685
2686/*===========================================================================
2687 * FUNCTION   : handleBuffersDuringFlushLock
2688 *
2689 * DESCRIPTION: Account for buffers returned from back-end during flush
2690 *              This function is executed while mMutex is held by the caller.
2691 *
2692 * PARAMETERS :
2693 *   @buffer: image buffer for the callback
2694 *
2695 * RETURN     :
2696 *==========================================================================*/
2697void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2698{
2699    bool buffer_found = false;
2700    for (List<PendingBuffersInRequest>::iterator req =
2701            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2702            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2703        for (List<PendingBufferInfo>::iterator i =
2704                req->mPendingBufferList.begin();
2705                i != req->mPendingBufferList.end(); i++) {
2706            if (i->buffer == buffer->buffer) {
2707                mPendingBuffersMap.numPendingBufsAtFlush--;
2708                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2709                    buffer->buffer, req->frame_number,
2710                    mPendingBuffersMap.numPendingBufsAtFlush);
2711                buffer_found = true;
2712                break;
2713            }
2714        }
2715        if (buffer_found) {
2716            break;
2717        }
2718    }
2719    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2720        //signal the flush()
2721        LOGD("All buffers returned to HAL. Continue flush");
2722        pthread_cond_signal(&mBuffersCond);
2723    }
2724}
2725
2726
2727/*===========================================================================
2728 * FUNCTION   : handlePendingReprocResults
2729 *
2730 * DESCRIPTION: check and notify on any pending reprocess results
2731 *
2732 * PARAMETERS :
2733 *   @frame_number   : Pending request frame number
2734 *
2735 * RETURN     : int32_t type of status
2736 *              NO_ERROR  -- success
2737 *              none-zero failure code
2738 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the deferred reprocess results for this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until the
            // reprocess result became ready.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the capture result sent up.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build the capture result from the stored reprocess
                    // output buffer plus the request's input/settings.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully answered; drop it from the pending list.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Erase the consumed reprocess entry; iterator j is not used
            // again after this, so erase-then-break is safe.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2777
2778/*===========================================================================
2779 * FUNCTION   : handleBatchMetadata
2780 *
2781 * DESCRIPTION: Handles metadata buffer callback in batch mode
2782 *
2783 * PARAMETERS : @metadata_buf: metadata buffer
2784 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2785 *                 the meta buf in this method
2786 *
2787 * RETURN     :
2788 *
2789 *==========================================================================*/
2790void QCamera3HardwareInterface::handleBatchMetadata(
2791        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2792{
2793    ATRACE_CALL();
2794
2795    if (NULL == metadata_buf) {
2796        LOGE("metadata_buf is NULL");
2797        return;
2798    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
2804    metadata_buffer_t *metadata =
2805            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2806    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2807    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2808    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2809    uint32_t frame_number = 0, urgent_frame_number = 0;
2810    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2811    bool invalid_metadata = false;
2812    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2813    size_t loopCount = 1;
2814
2815    int32_t *p_frame_number_valid =
2816            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2817    uint32_t *p_frame_number =
2818            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2819    int64_t *p_capture_time =
2820            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2821    int32_t *p_urgent_frame_number_valid =
2822            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2823    uint32_t *p_urgent_frame_number =
2824            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2825
2826    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2827            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2828            (NULL == p_urgent_frame_number)) {
2829        LOGE("Invalid metadata");
2830        invalid_metadata = true;
2831    } else {
2832        frame_number_valid = *p_frame_number_valid;
2833        last_frame_number = *p_frame_number;
2834        last_frame_capture_time = *p_capture_time;
2835        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2836        last_urgent_frame_number = *p_urgent_frame_number;
2837    }
2838
2839    /* In batchmode, when no video buffers are requested, set_parms are sent
2840     * for every capture_request. The difference between consecutive urgent
2841     * frame numbers and frame numbers should be used to interpolate the
2842     * corresponding frame numbers and time stamps */
2843    pthread_mutex_lock(&mMutex);
2844    if (urgent_frame_number_valid) {
2845        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
2846        if(idx < 0) {
2847            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
2848                last_urgent_frame_number);
2849            mState = ERROR;
2850            pthread_mutex_unlock(&mMutex);
2851            return;
2852        }
2853        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
2854        urgentFrameNumDiff = last_urgent_frame_number + 1 -
2855                first_urgent_frame_number;
2856
2857        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
2858                 urgent_frame_number_valid,
2859                first_urgent_frame_number, last_urgent_frame_number);
2860    }
2861
2862    if (frame_number_valid) {
2863        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
2864        if(idx < 0) {
2865            LOGE("Invalid frame number received: %d. Irrecoverable error",
2866                last_frame_number);
2867            mState = ERROR;
2868            pthread_mutex_unlock(&mMutex);
2869            return;
2870        }
2871        first_frame_number = mPendingBatchMap.valueAt(idx);
2872        frameNumDiff = last_frame_number + 1 -
2873                first_frame_number;
2874        mPendingBatchMap.removeItem(last_frame_number);
2875
2876        LOGH("frm: valid: %d frm_num: %d - %d",
2877                 frame_number_valid,
2878                first_frame_number, last_frame_number);
2879
2880    }
2881    pthread_mutex_unlock(&mMutex);
2882
2883    if (urgent_frame_number_valid || frame_number_valid) {
2884        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2885        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2886            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
2887                     urgentFrameNumDiff, last_urgent_frame_number);
2888        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2889            LOGE("frameNumDiff: %d frameNum: %d",
2890                     frameNumDiff, last_frame_number);
2891    }
2892
2893    for (size_t i = 0; i < loopCount; i++) {
2894        /* handleMetadataWithLock is called even for invalid_metadata for
2895         * pipeline depth calculation */
2896        if (!invalid_metadata) {
2897            /* Infer frame number. Batch metadata contains frame number of the
2898             * last frame */
2899            if (urgent_frame_number_valid) {
2900                if (i < urgentFrameNumDiff) {
2901                    urgent_frame_number =
2902                            first_urgent_frame_number + i;
2903                    LOGD("inferred urgent frame_number: %d",
2904                             urgent_frame_number);
2905                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2906                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2907                } else {
2908                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2909                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2910                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2911                }
2912            }
2913
2914            /* Infer frame number. Batch metadata contains frame number of the
2915             * last frame */
2916            if (frame_number_valid) {
2917                if (i < frameNumDiff) {
2918                    frame_number = first_frame_number + i;
2919                    LOGD("inferred frame_number: %d", frame_number);
2920                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2921                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2922                } else {
2923                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2924                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2925                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2926                }
2927            }
2928
2929            if (last_frame_capture_time) {
2930                //Infer timestamp
2931                first_frame_capture_time = last_frame_capture_time -
2932                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
2933                capture_time =
2934                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
2935                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2936                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2937                LOGH("batch capture_time: %lld, capture_time: %lld",
2938                         last_frame_capture_time, capture_time);
2939            }
2940        }
2941        pthread_mutex_lock(&mMutex);
2942        handleMetadataWithLock(metadata_buf,
2943                false /* free_and_bufdone_meta_buf */,
2944                (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
2945                (i == frameNumDiff-1) /* last metadata in the batch metadata */);
2946        pthread_mutex_unlock(&mMutex);
2947    }
2948
2949    /* BufDone metadata buffer */
2950    if (free_and_bufdone_meta_buf) {
2951        mMetadataChannel->bufDone(metadata_buf);
2952        free(metadata_buf);
2953    }
2954}
2955
2956void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2957        camera3_error_msg_code_t errorCode)
2958{
2959    camera3_notify_msg_t notify_msg;
2960    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2961    notify_msg.type = CAMERA3_MSG_ERROR;
2962    notify_msg.message.error.error_code = errorCode;
2963    notify_msg.message.error.error_stream = NULL;
2964    notify_msg.message.error.frame_number = frameNumber;
2965    mCallbackOps->notify(mCallbackOps, &notify_msg);
2966
2967    return;
2968}
2969/*===========================================================================
2970 * FUNCTION   : handleMetadataWithLock
2971 *
2972 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2973 *
2974 * PARAMETERS : @metadata_buf: metadata buffer
2975 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2976 *                 the meta buf in this method
2977 *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
2978 *                  last urgent metadata in a batch. Always true for non-batch mode
2979 *              @lastMetadataInBatch: Boolean to indicate whether this is the
2980 *                  last metadata in a batch. Always true for non-batch mode
2981 *
2982 * RETURN     :
2983 *
2984 *==========================================================================*/
2985void QCamera3HardwareInterface::handleMetadataWithLock(
2986    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2987    bool lastUrgentMetadataInBatch, bool lastMetadataInBatch)
2988{
2989    ATRACE_CALL();
2990    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2991        //during flush do not send metadata from this thread
2992        LOGD("not sending metadata during flush or when mState is error");
2993        if (free_and_bufdone_meta_buf) {
2994            mMetadataChannel->bufDone(metadata_buf);
2995            free(metadata_buf);
2996        }
2997        return;
2998    }
2999
3000    //not in flush
3001    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3002    int32_t frame_number_valid, urgent_frame_number_valid;
3003    uint32_t frame_number, urgent_frame_number;
3004    int64_t capture_time, capture_time_av;
3005    nsecs_t currentSysTime;
3006
3007    int32_t *p_frame_number_valid =
3008            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3009    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3010    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3011    int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
3012    int32_t *p_urgent_frame_number_valid =
3013            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3014    uint32_t *p_urgent_frame_number =
3015            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3016    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3017            metadata) {
3018        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3019                 *p_frame_number_valid, *p_frame_number);
3020    }
3021
3022    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3023            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3024        LOGE("Invalid metadata");
3025        if (free_and_bufdone_meta_buf) {
3026            mMetadataChannel->bufDone(metadata_buf);
3027            free(metadata_buf);
3028        }
3029        goto done_metadata;
3030    }
3031    frame_number_valid =        *p_frame_number_valid;
3032    frame_number =              *p_frame_number;
3033    capture_time =              *p_capture_time;
3034    capture_time_av =           *p_capture_time_av;
3035    urgent_frame_number_valid = *p_urgent_frame_number_valid;
3036    urgent_frame_number =       *p_urgent_frame_number;
3037    currentSysTime =            systemTime(CLOCK_MONOTONIC);
3038
3039    if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3040        const int tries = 3;
3041        nsecs_t bestGap, measured;
3042        for (int i = 0; i < tries; ++i) {
3043            const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3044            const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3045            const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3046            const nsecs_t gap = tmono2 - tmono;
3047            if (i == 0 || gap < bestGap) {
3048                bestGap = gap;
3049                measured = tbase - ((tmono + tmono2) >> 1);
3050            }
3051        }
3052        capture_time -= measured;
3053    }
3054
3055    // Detect if buffers from any requests are overdue
3056    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3057        if ( (currentSysTime - req.timestamp) >
3058            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
3059            for (auto &missed : req.mPendingBufferList) {
3060                assert(missed.stream->priv);
3061                if (missed.stream->priv) {
3062                    QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3063                    assert(ch->mStreams[0]);
3064                    if (ch->mStreams[0]) {
3065                        LOGW("Missing: frame = %d, buffer = %p,"
3066                            "stream type = %d, stream format = %d",
3067                            req.frame_number, missed.buffer,
3068                            ch->mStreams[0]->getMyType(), missed.stream->format);
3069                        ch->timeoutFrame(req.frame_number);
3070                    }
3071                }
3072            }
3073        }
3074    }
3075    //Partial result on process_capture_result for timestamp
3076    if (urgent_frame_number_valid) {
3077        LOGD("valid urgent frame_number = %u, capture_time = %lld",
3078           urgent_frame_number, capture_time);
3079
3080        //Recieved an urgent Frame Number, handle it
3081        //using partial results
3082        for (pendingRequestIterator i =
3083                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3084            LOGD("Iterator Frame = %d urgent frame = %d",
3085                 i->frame_number, urgent_frame_number);
3086
3087            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3088                (i->partial_result_cnt == 0)) {
3089                LOGE("Error: HAL missed urgent metadata for frame number %d",
3090                         i->frame_number);
3091                i->partial_result_cnt++;
3092            }
3093
3094            if (i->frame_number == urgent_frame_number &&
3095                     i->bUrgentReceived == 0) {
3096
3097                camera3_capture_result_t result;
3098                memset(&result, 0, sizeof(camera3_capture_result_t));
3099
3100                i->partial_result_cnt++;
3101                i->bUrgentReceived = 1;
3102                // Extract 3A metadata
3103                result.result = translateCbUrgentMetadataToResultMetadata(
3104                        metadata, lastUrgentMetadataInBatch);
3105                // Populate metadata result
3106                result.frame_number = urgent_frame_number;
3107                result.num_output_buffers = 0;
3108                result.output_buffers = NULL;
3109                result.partial_result = i->partial_result_cnt;
3110
3111                mCallbackOps->process_capture_result(mCallbackOps, &result);
3112                LOGD("urgent frame_number = %u, capture_time = %lld",
3113                      result.frame_number, capture_time);
3114                free_camera_metadata((camera_metadata_t *)result.result);
3115                break;
3116            }
3117        }
3118    }
3119
3120    if (!frame_number_valid) {
3121        LOGD("Not a valid normal frame number, used as SOF only");
3122        if (free_and_bufdone_meta_buf) {
3123            mMetadataChannel->bufDone(metadata_buf);
3124            free(metadata_buf);
3125        }
3126        goto done_metadata;
3127    }
3128    LOGH("valid frame_number = %u, capture_time = %lld",
3129            frame_number, capture_time);
3130
3131    for (pendingRequestIterator i = mPendingRequestsList.begin();
3132            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3133        // Flush out all entries with less or equal frame numbers.
3134
3135        camera3_capture_result_t result;
3136        memset(&result, 0, sizeof(camera3_capture_result_t));
3137
3138        LOGD("frame_number in the list is %u", i->frame_number);
3139        i->partial_result_cnt++;
3140        result.partial_result = i->partial_result_cnt;
3141
3142        // Check whether any stream buffer corresponding to this is dropped or not
3143        // If dropped, then send the ERROR_BUFFER for the corresponding stream
3144        // The API does not expect a blob buffer to be dropped
3145        if (p_cam_frame_drop) {
3146            /* Clear notify_msg structure */
3147            camera3_notify_msg_t notify_msg;
3148            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3149            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3150                    j != i->buffers.end(); j++) {
3151                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3152                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3153                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
3154                    if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
3155                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3156                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
3157                                __func__, i->frame_number, streamID, j->stream->format);
3158                        notify_msg.type = CAMERA3_MSG_ERROR;
3159                        notify_msg.message.error.frame_number = i->frame_number;
3160                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
3161                        notify_msg.message.error.error_stream = j->stream;
3162                        mCallbackOps->notify(mCallbackOps, &notify_msg);
3163                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
3164                                __func__, i->frame_number, streamID, j->stream->format);
3165                        PendingFrameDropInfo PendingFrameDrop;
3166                        PendingFrameDrop.frame_number=i->frame_number;
3167                        PendingFrameDrop.stream_ID = streamID;
3168                        // Add the Frame drop info to mPendingFrameDropList
3169                        mPendingFrameDropList.push_back(PendingFrameDrop);
3170                   }
3171               }
3172            }
3173        }
3174
3175        // Send empty metadata with already filled buffers for dropped metadata
3176        // and send valid metadata with already filled buffers for current metadata
3177        /* we could hit this case when we either
3178         * 1. have a pending reprocess request or
3179         * 2. miss a metadata buffer callback */
3180        if (i->frame_number < frame_number) {
3181            if (i->input_buffer) {
3182                /* this will be handled in handleInputBufferWithLock */
3183                i++;
3184                continue;
3185            } else {
3186
3187                CameraMetadata dummyMetadata;
3188                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3189                result.result = dummyMetadata.release();
3190
3191                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
3192
3193                // partial_result should be PARTIAL_RESULT_CNT in case of
3194                // ERROR_RESULT.
3195                i->partial_result_cnt = PARTIAL_RESULT_COUNT;
3196                result.partial_result = PARTIAL_RESULT_COUNT;
3197            }
3198        } else {
3199            mPendingLiveRequest--;
3200            /* Clear notify_msg structure */
3201            camera3_notify_msg_t notify_msg;
3202            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3203
3204            // Send shutter notify to frameworks
3205            notify_msg.type = CAMERA3_MSG_SHUTTER;
3206            notify_msg.message.shutter.frame_number = i->frame_number;
3207            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3208            mCallbackOps->notify(mCallbackOps, &notify_msg);
3209
3210            i->timestamp = capture_time;
3211
3212            /* Set the timestamp in display metadata so that clients aware of
3213               private_handle such as VT can use this un-modified timestamps.
3214               Camera framework is unaware of this timestamp and cannot change this */
3215            updateTimeStampInPendingBuffers(i->frame_number, capture_time_av);
3216
3217            // Find channel requiring metadata, meaning internal offline postprocess
3218            // is needed.
3219            //TODO: for now, we don't support two streams requiring metadata at the same time.
3220            // (because we are not making copies, and metadata buffer is not reference counted.
3221            bool internalPproc = false;
3222            for (pendingBufferIterator iter = i->buffers.begin();
3223                    iter != i->buffers.end(); iter++) {
3224                if (iter->need_metadata) {
3225                    internalPproc = true;
3226                    QCamera3ProcessingChannel *channel =
3227                            (QCamera3ProcessingChannel *)iter->stream->priv;
3228                    channel->queueReprocMetadata(metadata_buf);
3229                    break;
3230                }
3231            }
3232
3233            // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
3234            result.result = translateFromHalMetadata(metadata,
3235                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
3236                    i->capture_intent, i->hybrid_ae_enable,
3237                     /* DevCamDebug metadata translateFromHalMetadata function call*/
3238                    i->DevCamDebug_meta_enable,
3239                    /* DevCamDebug metadata end */
3240                    internalPproc, i->fwkCacMode,
3241                    lastMetadataInBatch);
3242            // atrace_end(ATRACE_TAG_ALWAYS);
3243
3244            saveExifParams(metadata);
3245
3246            if (i->blob_request) {
3247                {
3248                    //Dump tuning metadata if enabled and available
3249                    char prop[PROPERTY_VALUE_MAX];
3250                    memset(prop, 0, sizeof(prop));
3251                    property_get("persist.camera.dumpmetadata", prop, "0");
3252                    int32_t enabled = atoi(prop);
3253                    if (enabled && metadata->is_tuning_params_valid) {
3254                        dumpMetadataToFile(metadata->tuning_params,
3255                               mMetaFrameCount,
3256                               enabled,
3257                               "Snapshot",
3258                               frame_number);
3259                    }
3260                }
3261            }
3262
3263            if (!internalPproc) {
3264                LOGD("couldn't find need_metadata for this metadata");
3265                // Return metadata buffer
3266                if (free_and_bufdone_meta_buf) {
3267                    mMetadataChannel->bufDone(metadata_buf);
3268                    free(metadata_buf);
3269                }
3270            }
3271        }
3272        if (!result.result) {
3273            LOGE("metadata is NULL");
3274        }
3275        result.frame_number = i->frame_number;
3276        result.input_buffer = i->input_buffer;
3277        result.num_output_buffers = 0;
3278        result.output_buffers = NULL;
3279        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3280                    j != i->buffers.end(); j++) {
3281            if (j->buffer) {
3282                result.num_output_buffers++;
3283            }
3284        }
3285
3286        updateFpsInPreviewBuffer(metadata, i->frame_number);
3287
3288        if (result.num_output_buffers > 0) {
3289            camera3_stream_buffer_t *result_buffers =
3290                new camera3_stream_buffer_t[result.num_output_buffers];
3291            if (result_buffers != NULL) {
3292                size_t result_buffers_idx = 0;
3293                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3294                        j != i->buffers.end(); j++) {
3295                    if (j->buffer) {
3296                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3297                                m != mPendingFrameDropList.end(); m++) {
3298                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3299                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3300                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3301                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3302                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3303                                        frame_number, streamID);
3304                                m = mPendingFrameDropList.erase(m);
3305                                break;
3306                            }
3307                        }
3308                        j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
3309                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
3310                        result_buffers[result_buffers_idx++] = *(j->buffer);
3311                        free(j->buffer);
3312                        j->buffer = NULL;
3313                    }
3314                }
3315
3316                result.output_buffers = result_buffers;
3317                mCallbackOps->process_capture_result(mCallbackOps, &result);
3318                LOGD("meta frame_number = %u, capture_time = %lld",
3319                        result.frame_number, i->timestamp);
3320                free_camera_metadata((camera_metadata_t *)result.result);
3321                delete[] result_buffers;
3322            }else {
3323                LOGE("Fatal error: out of memory");
3324            }
3325        } else {
3326            mCallbackOps->process_capture_result(mCallbackOps, &result);
3327            LOGD("meta frame_number = %u, capture_time = %lld",
3328                    result.frame_number, i->timestamp);
3329            free_camera_metadata((camera_metadata_t *)result.result);
3330        }
3331
3332        i = erasePendingRequest(i);
3333
3334        if (!mPendingReprocessResultList.empty()) {
3335            handlePendingReprocResults(frame_number + 1);
3336        }
3337    }
3338
3339done_metadata:
3340    for (pendingRequestIterator i = mPendingRequestsList.begin();
3341            i != mPendingRequestsList.end() ;i++) {
3342        i->pipeline_depth++;
3343    }
3344    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3345    unblockRequestIfNecessary();
3346}
3347
3348/*===========================================================================
3349 * FUNCTION   : hdrPlusPerfLock
3350 *
3351 * DESCRIPTION: perf lock for HDR+ using custom intent
3352 *
3353 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3354 *
3355 * RETURN     : None
3356 *
3357 *==========================================================================*/
3358void QCamera3HardwareInterface::hdrPlusPerfLock(
3359        mm_camera_super_buf_t *metadata_buf)
3360{
3361    if (NULL == metadata_buf) {
3362        LOGE("metadata_buf is NULL");
3363        return;
3364    }
3365    metadata_buffer_t *metadata =
3366            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367    int32_t *p_frame_number_valid =
3368            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3369    uint32_t *p_frame_number =
3370            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3371
3372    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3373        LOGE("%s: Invalid metadata", __func__);
3374        return;
3375    }
3376
3377    //acquire perf lock for 5 sec after the last HDR frame is captured
3378    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3379        if ((p_frame_number != NULL) &&
3380                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3381            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3382        }
3383    }
3384
3385    //release lock after perf lock timer is expired. If lock is already released,
3386    //isTimerReset returns false
3387    if (m_perfLock.isTimerReset()) {
3388        mLastCustIntentFrmNum = -1;
3389        m_perfLock.lock_rel_timed();
3390    }
3391}
3392
3393/*===========================================================================
3394 * FUNCTION   : handleInputBufferWithLock
3395 *
3396 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3397 *
3398 * PARAMETERS : @frame_number: frame number of the input buffer
3399 *
3400 * RETURN     :
3401 *
3402 *==========================================================================*/
3403void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3404{
3405    ATRACE_CALL();
3406    pendingRequestIterator i = mPendingRequestsList.begin();
3407    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3408        i++;
3409    }
3410    if (i != mPendingRequestsList.end() && i->input_buffer) {
3411        //found the right request
3412        if (!i->shutter_notified) {
3413            CameraMetadata settings;
3414            camera3_notify_msg_t notify_msg;
3415            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3416            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3417            if(i->settings) {
3418                settings = i->settings;
3419                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3420                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3421                } else {
3422                    LOGE("No timestamp in input settings! Using current one.");
3423                }
3424            } else {
3425                LOGE("Input settings missing!");
3426            }
3427
3428            notify_msg.type = CAMERA3_MSG_SHUTTER;
3429            notify_msg.message.shutter.frame_number = frame_number;
3430            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3431            mCallbackOps->notify(mCallbackOps, &notify_msg);
3432            i->shutter_notified = true;
3433            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3434                        i->frame_number, notify_msg.message.shutter.timestamp);
3435        }
3436
3437        if (i->input_buffer->release_fence != -1) {
3438           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3439           close(i->input_buffer->release_fence);
3440           if (rc != OK) {
3441               LOGE("input buffer sync wait failed %d", rc);
3442           }
3443        }
3444
3445        camera3_capture_result result;
3446        memset(&result, 0, sizeof(camera3_capture_result));
3447        result.frame_number = frame_number;
3448        result.result = i->settings;
3449        result.input_buffer = i->input_buffer;
3450        result.partial_result = PARTIAL_RESULT_COUNT;
3451
3452        mCallbackOps->process_capture_result(mCallbackOps, &result);
3453        LOGD("Input request metadata and input buffer frame_number = %u",
3454                        i->frame_number);
3455        i = erasePendingRequest(i);
3456    } else {
3457        LOGE("Could not find input request for frame number %d", frame_number);
3458    }
3459}
3460
3461/*===========================================================================
3462 * FUNCTION   : handleBufferWithLock
3463 *
3464 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3465 *
3466 * PARAMETERS : @buffer: image buffer for the callback
3467 *              @frame_number: frame number of the image buffer
3468 *
3469 * RETURN     :
3470 *
3471 *==========================================================================*/
3472void QCamera3HardwareInterface::handleBufferWithLock(
3473    camera3_stream_buffer_t *buffer, uint32_t frame_number)
3474{
3475    ATRACE_CALL();
3476    /* Nothing to be done during error state */
3477    if ((ERROR == mState) || (DEINIT == mState)) {
3478        return;
3479    }
3480    if (mFlushPerf) {
3481        handleBuffersDuringFlushLock(buffer);
3482        return;
3483    }
3484    //not in flush
3485    // If the frame number doesn't exist in the pending request list,
3486    // directly send the buffer to the frameworks, and update pending buffers map
3487    // Otherwise, book-keep the buffer.
3488    pendingRequestIterator i = mPendingRequestsList.begin();
3489    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3490        i++;
3491    }
3492    if (i == mPendingRequestsList.end()) {
3493        // Verify all pending requests frame_numbers are greater
3494        for (pendingRequestIterator j = mPendingRequestsList.begin();
3495                j != mPendingRequestsList.end(); j++) {
3496            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3497                LOGW("Error: pending live frame number %d is smaller than %d",
3498                         j->frame_number, frame_number);
3499            }
3500        }
3501        camera3_capture_result_t result;
3502        memset(&result, 0, sizeof(camera3_capture_result_t));
3503        result.result = NULL;
3504        result.frame_number = frame_number;
3505        result.num_output_buffers = 1;
3506        result.partial_result = 0;
3507        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3508                m != mPendingFrameDropList.end(); m++) {
3509            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3510            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3511            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3512                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3513                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3514                         frame_number, streamID);
3515                m = mPendingFrameDropList.erase(m);
3516                break;
3517            }
3518        }
3519        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
3520        result.output_buffers = buffer;
3521        LOGH("result frame_number = %d, buffer = %p",
3522                 frame_number, buffer->buffer);
3523
3524        mPendingBuffersMap.removeBuf(buffer->buffer);
3525
3526        mCallbackOps->process_capture_result(mCallbackOps, &result);
3527    } else {
3528        if (i->input_buffer) {
3529            CameraMetadata settings;
3530            camera3_notify_msg_t notify_msg;
3531            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3532            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3533            if(i->settings) {
3534                settings = i->settings;
3535                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3536                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3537                } else {
3538                    LOGW("No timestamp in input settings! Using current one.");
3539                }
3540            } else {
3541                LOGE("Input settings missing!");
3542            }
3543
3544            notify_msg.type = CAMERA3_MSG_SHUTTER;
3545            notify_msg.message.shutter.frame_number = frame_number;
3546            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3547
3548            if (i->input_buffer->release_fence != -1) {
3549               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3550               close(i->input_buffer->release_fence);
3551               if (rc != OK) {
3552                   LOGE("input buffer sync wait failed %d", rc);
3553               }
3554            }
3555            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
3556            mPendingBuffersMap.removeBuf(buffer->buffer);
3557
3558            bool notifyNow = true;
3559            for (pendingRequestIterator j = mPendingRequestsList.begin();
3560                    j != mPendingRequestsList.end(); j++) {
3561                if (j->frame_number < frame_number) {
3562                    notifyNow = false;
3563                    break;
3564                }
3565            }
3566
3567            if (notifyNow) {
3568                camera3_capture_result result;
3569                memset(&result, 0, sizeof(camera3_capture_result));
3570                result.frame_number = frame_number;
3571                result.result = i->settings;
3572                result.input_buffer = i->input_buffer;
3573                result.num_output_buffers = 1;
3574                result.output_buffers = buffer;
3575                result.partial_result = PARTIAL_RESULT_COUNT;
3576
3577                mCallbackOps->notify(mCallbackOps, &notify_msg);
3578                mCallbackOps->process_capture_result(mCallbackOps, &result);
3579                LOGD("Notify reprocess now %d!", frame_number);
3580                i = erasePendingRequest(i);
3581            } else {
3582                // Cache reprocess result for later
3583                PendingReprocessResult pendingResult;
3584                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
3585                pendingResult.notify_msg = notify_msg;
3586                pendingResult.buffer = *buffer;
3587                pendingResult.frame_number = frame_number;
3588                mPendingReprocessResultList.push_back(pendingResult);
3589                LOGD("Cache reprocess result %d!", frame_number);
3590            }
3591        } else {
3592            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3593                j != i->buffers.end(); j++) {
3594                if (j->stream == buffer->stream) {
3595                    if (j->buffer != NULL) {
3596                        LOGE("Error: buffer is already set");
3597                    } else {
3598                        j->buffer = (camera3_stream_buffer_t *)malloc(
3599                            sizeof(camera3_stream_buffer_t));
3600                        *(j->buffer) = *buffer;
3601                        LOGH("cache buffer %p at result frame_number %u",
3602                             buffer->buffer, frame_number);
3603                    }
3604                }
3605            }
3606        }
3607    }
3608}
3609
3610/*===========================================================================
3611 * FUNCTION   : unblockRequestIfNecessary
3612 *
3613 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3614 *              that mMutex is held when this function is called.
3615 *
3616 * PARAMETERS :
3617 *
3618 * RETURN     :
3619 *
3620 *==========================================================================*/
3621void QCamera3HardwareInterface::unblockRequestIfNecessary()
3622{
3623   // Unblock process_capture_request
3624   pthread_cond_signal(&mRequestCond);
3625}
3626
3627
3628/*===========================================================================
3629 * FUNCTION   : processCaptureRequest
3630 *
3631 * DESCRIPTION: process a capture request from camera service
3632 *
3633 * PARAMETERS :
3634 *   @request : request from framework to process
3635 *
3636 * RETURN     :
3637 *
3638 *==========================================================================*/
3639int QCamera3HardwareInterface::processCaptureRequest(
3640                    camera3_capture_request_t *request)
3641{
3642    ATRACE_CALL();
3643    int rc = NO_ERROR;
3644    int32_t request_id;
3645    CameraMetadata meta;
3646    bool isVidBufRequested = false;
3647    camera3_stream_buffer_t *pInputBuffer = NULL;
3648
3649    pthread_mutex_lock(&mMutex);
3650
3651    // Validate current state
3652    switch (mState) {
3653        case CONFIGURED:
3654        case STARTED:
3655            /* valid state */
3656            break;
3657
3658        case ERROR:
3659            pthread_mutex_unlock(&mMutex);
3660            handleCameraDeviceError();
3661            return -ENODEV;
3662
3663        default:
3664            LOGE("Invalid state %d", mState);
3665            pthread_mutex_unlock(&mMutex);
3666            return -ENODEV;
3667    }
3668
3669    rc = validateCaptureRequest(request);
3670    if (rc != NO_ERROR) {
3671        LOGE("incoming request is not valid");
3672        pthread_mutex_unlock(&mMutex);
3673        return rc;
3674    }
3675
3676    meta = request->settings;
3677
3678    // For first capture request, send capture intent, and
3679    // stream on all streams
3680    if (mState == CONFIGURED) {
3681        // send an unconfigure to the backend so that the isp
3682        // resources are deallocated
3683        if (!mFirstConfiguration) {
3684            cam_stream_size_info_t stream_config_info;
3685            int32_t hal_version = CAM_HAL_V3;
3686            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3687            stream_config_info.buffer_info.min_buffers =
3688                    MIN_INFLIGHT_REQUESTS;
3689            stream_config_info.buffer_info.max_buffers =
3690                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3691            clear_metadata_buffer(mParameters);
3692            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3693                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3694            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3695                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3696            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3697                    mParameters);
3698            if (rc < 0) {
3699                LOGE("set_parms for unconfigure failed");
3700                pthread_mutex_unlock(&mMutex);
3701                return rc;
3702            }
3703        }
3704        m_perfLock.lock_acq();
3705        /* get eis information for stream configuration */
3706        cam_is_type_t is_type;
3707        char is_type_value[PROPERTY_VALUE_MAX];
3708        property_get("persist.camera.is_type", is_type_value, "0");
3709        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3710
3711        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3712            int32_t hal_version = CAM_HAL_V3;
3713            uint8_t captureIntent =
3714                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3715            mCaptureIntent = captureIntent;
3716            clear_metadata_buffer(mParameters);
3717            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3718            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3719        }
3720
3721        //If EIS is enabled, turn it on for video
3722        bool setEis = m_bEisEnable && m_bEisSupportedSize && !meta.exists(QCAMERA3_USE_AV_TIMER);
3723        int32_t vsMode;
3724        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3725        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3726            rc = BAD_VALUE;
3727        }
3728
3729        //IS type will be 0 unless EIS is supported. If EIS is supported
3730        //it could either be 1 or 4 depending on the stream and video size
3731        if (setEis) {
3732            if (!m_bEisSupportedSize) {
3733                is_type = IS_TYPE_DIS;
3734            } else {
3735                is_type = IS_TYPE_EIS_2_0;
3736            }
3737            mStreamConfigInfo.is_type = is_type;
3738        } else {
3739            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3740        }
3741
3742        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3743                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3744        int32_t tintless_value = 1;
3745        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3746                CAM_INTF_PARM_TINTLESS, tintless_value);
3747        //Disable CDS for HFR mode or if DIS/EIS is on.
3748        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3749        //after every configure_stream
3750        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3751                (m_bIsVideo)) {
3752            int32_t cds = CAM_CDS_MODE_OFF;
3753            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3754                    CAM_INTF_PARM_CDS_MODE, cds))
3755                LOGE("Failed to disable CDS for HFR mode");
3756
3757        }
3758
3759        if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3760            uint8_t* use_av_timer = NULL;
3761
3762            if (m_debug_avtimer){
3763                use_av_timer = &m_debug_avtimer;
3764            }
3765            else{
3766                use_av_timer =
3767                    meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3768            }
3769
3770            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3771                rc = BAD_VALUE;
3772            }
3773        }
3774
3775        setMobicat();
3776
3777        /* Set fps and hfr mode while sending meta stream info so that sensor
3778         * can configure appropriate streaming mode */
3779        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3780        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3781        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3782        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3783            rc = setHalFpsRange(meta, mParameters);
3784            if (rc == NO_ERROR) {
3785                int32_t max_fps =
3786                    (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3787                if (mBatchSize) {
3788                    /* For HFR, more buffers are dequeued upfront to improve the performance */
3789                    mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3790                    mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3791                } else if (max_fps == 60) {
3792                    /* for 60 fps usecas increase inflight requests */
3793                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3794                    mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
3795                } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
3796                    /* for non 60 fps video use cases, set min = max inflight requests to
3797                    avoid frame drops due to degraded system performance */
3798                    mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
3799                }
3800            }
3801            else {
3802                LOGE("setHalFpsRange failed");
3803            }
3804        }
3805        if (meta.exists(ANDROID_CONTROL_MODE)) {
3806            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3807            rc = extractSceneMode(meta, metaMode, mParameters);
3808            if (rc != NO_ERROR) {
3809                LOGE("extractSceneMode failed");
3810            }
3811        }
3812        memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
3813
3814
3815        //TODO: validate the arguments, HSV scenemode should have only the
3816        //advertised fps ranges
3817
3818        /*set the capture intent, hal version, tintless, stream info,
3819         *and disenable parameters to the backend*/
3820        LOGD("set_parms META_STREAM_INFO " );
3821        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3822            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3823                    "Format:%d",
3824                    mStreamConfigInfo.type[i],
3825                    mStreamConfigInfo.stream_sizes[i].width,
3826                    mStreamConfigInfo.stream_sizes[i].height,
3827                    mStreamConfigInfo.postprocess_mask[i],
3828                    mStreamConfigInfo.format[i]);
3829        }
3830
3831        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3832                    mParameters);
3833        if (rc < 0) {
3834            LOGE("set_parms failed for hal version, stream info");
3835        }
3836
3837        cam_dimension_t sensor_dim;
3838        memset(&sensor_dim, 0, sizeof(sensor_dim));
3839        rc = getSensorOutputSize(sensor_dim);
3840        if (rc != NO_ERROR) {
3841            LOGE("Failed to get sensor output size");
3842            pthread_mutex_unlock(&mMutex);
3843            goto error_exit;
3844        }
3845
3846        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3847                gCamCapability[mCameraId]->active_array_size.height,
3848                sensor_dim.width, sensor_dim.height);
3849
3850        /* Set batchmode before initializing channel. Since registerBuffer
3851         * internally initializes some of the channels, better set batchmode
3852         * even before first register buffer */
3853        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3854            it != mStreamInfo.end(); it++) {
3855            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3856            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3857                    && mBatchSize) {
3858                rc = channel->setBatchSize(mBatchSize);
3859                //Disable per frame map unmap for HFR/batchmode case
3860                rc |= channel->setPerFrameMapUnmap(false);
3861                if (NO_ERROR != rc) {
3862                    LOGE("Channel init failed %d", rc);
3863                    pthread_mutex_unlock(&mMutex);
3864                    goto error_exit;
3865                }
3866            }
3867        }
3868
3869        //First initialize all streams
3870        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3871            it != mStreamInfo.end(); it++) {
3872            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3873            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3874               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3875               setEis)
3876                rc = channel->initialize(is_type);
3877            else {
3878                rc = channel->initialize(IS_TYPE_NONE);
3879            }
3880            if (NO_ERROR != rc) {
3881                LOGE("Channel initialization failed %d", rc);
3882                pthread_mutex_unlock(&mMutex);
3883                goto error_exit;
3884            }
3885        }
3886
3887        if (mRawDumpChannel) {
3888            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3889            if (rc != NO_ERROR) {
3890                LOGE("Error: Raw Dump Channel init failed");
3891                pthread_mutex_unlock(&mMutex);
3892                goto error_exit;
3893            }
3894        }
3895        if (mSupportChannel) {
3896            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3897            if (rc < 0) {
3898                LOGE("Support channel initialization failed");
3899                pthread_mutex_unlock(&mMutex);
3900                goto error_exit;
3901            }
3902        }
3903        if (mAnalysisChannel) {
3904            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3905            if (rc < 0) {
3906                LOGE("Analysis channel initialization failed");
3907                pthread_mutex_unlock(&mMutex);
3908                goto error_exit;
3909            }
3910        }
3911        if (mDummyBatchChannel) {
3912            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3913            if (rc < 0) {
3914                LOGE("mDummyBatchChannel setBatchSize failed");
3915                pthread_mutex_unlock(&mMutex);
3916                goto error_exit;
3917            }
3918            rc = mDummyBatchChannel->initialize(is_type);
3919            if (rc < 0) {
3920                LOGE("mDummyBatchChannel initialization failed");
3921                pthread_mutex_unlock(&mMutex);
3922                goto error_exit;
3923            }
3924        }
3925
3926        // Set bundle info
3927        rc = setBundleInfo();
3928        if (rc < 0) {
3929            LOGE("setBundleInfo failed %d", rc);
3930            pthread_mutex_unlock(&mMutex);
3931            goto error_exit;
3932        }
3933
3934        //update settings from app here
3935        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3936            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3937            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3938        }
3939        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3940            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3941            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3942        }
3943        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3944            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3945            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3946
3947            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3948                (mLinkedCameraId != mCameraId) ) {
3949                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3950                    mLinkedCameraId, mCameraId);
3951                goto error_exit;
3952            }
3953        }
3954
3955        // add bundle related cameras
3956        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3957        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3958            if (mIsDeviceLinked)
3959                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3960            else
3961                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3962
3963            pthread_mutex_lock(&gCamLock);
3964
3965            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3966                LOGE("Dualcam: Invalid Session Id ");
3967                pthread_mutex_unlock(&gCamLock);
3968                goto error_exit;
3969            }
3970
3971            if (mIsMainCamera == 1) {
3972                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3973                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3974                // related session id should be session id of linked session
3975                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3976            } else {
3977                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3978                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3979                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3980            }
3981            pthread_mutex_unlock(&gCamLock);
3982
3983            rc = mCameraHandle->ops->sync_related_sensors(
3984                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3985            if (rc < 0) {
3986                LOGE("Dualcam: link failed");
3987                goto error_exit;
3988            }
3989        }
3990
3991        //Then start them.
3992        LOGH("Start META Channel");
3993        rc = mMetadataChannel->start();
3994        if (rc < 0) {
3995            LOGE("META channel start failed");
3996            pthread_mutex_unlock(&mMutex);
3997            goto error_exit;
3998        }
3999
4000        if (mAnalysisChannel) {
4001            rc = mAnalysisChannel->start();
4002            if (rc < 0) {
4003                LOGE("Analysis channel start failed");
4004                mMetadataChannel->stop();
4005                pthread_mutex_unlock(&mMutex);
4006                goto error_exit;
4007            }
4008        }
4009
4010        if (mSupportChannel) {
4011            rc = mSupportChannel->start();
4012            if (rc < 0) {
4013                LOGE("Support channel start failed");
4014                mMetadataChannel->stop();
4015                /* Although support and analysis are mutually exclusive today
4016                   adding it in anycase for future proofing */
4017                if (mAnalysisChannel) {
4018                    mAnalysisChannel->stop();
4019                }
4020                pthread_mutex_unlock(&mMutex);
4021                goto error_exit;
4022            }
4023        }
4024        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4025            it != mStreamInfo.end(); it++) {
4026            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4027            LOGH("Start Processing Channel mask=%d",
4028                     channel->getStreamTypeMask());
4029            rc = channel->start();
4030            if (rc < 0) {
4031                LOGE("channel start failed");
4032                pthread_mutex_unlock(&mMutex);
4033                goto error_exit;
4034            }
4035        }
4036
4037        if (mRawDumpChannel) {
4038            LOGD("Starting raw dump stream");
4039            rc = mRawDumpChannel->start();
4040            if (rc != NO_ERROR) {
4041                LOGE("Error Starting Raw Dump Channel");
4042                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4043                      it != mStreamInfo.end(); it++) {
4044                    QCamera3Channel *channel =
4045                        (QCamera3Channel *)(*it)->stream->priv;
4046                    LOGH("Stopping Processing Channel mask=%d",
4047                        channel->getStreamTypeMask());
4048                    channel->stop();
4049                }
4050                if (mSupportChannel)
4051                    mSupportChannel->stop();
4052                if (mAnalysisChannel) {
4053                    mAnalysisChannel->stop();
4054                }
4055                mMetadataChannel->stop();
4056                pthread_mutex_unlock(&mMutex);
4057                goto error_exit;
4058            }
4059        }
4060
4061        if (mChannelHandle) {
4062
4063            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4064                    mChannelHandle);
4065            if (rc != NO_ERROR) {
4066                LOGE("start_channel failed %d", rc);
4067                pthread_mutex_unlock(&mMutex);
4068                goto error_exit;
4069            }
4070        }
4071
4072        goto no_error;
4073error_exit:
4074        m_perfLock.lock_rel();
4075        return rc;
4076no_error:
4077        m_perfLock.lock_rel();
4078
4079        mWokenUpByDaemon = false;
4080        mPendingLiveRequest = 0;
4081        mFirstConfiguration = false;
4082        enablePowerHint();
4083    }
4084
4085    uint32_t frameNumber = request->frame_number;
4086    cam_stream_ID_t streamsArray;
4087
4088    if (mFlushPerf) {
4089        //we cannot accept any requests during flush
4090        LOGE("process_capture_request cannot proceed during flush");
4091        pthread_mutex_unlock(&mMutex);
4092        return NO_ERROR; //should return an error
4093    }
4094
4095    if (meta.exists(ANDROID_REQUEST_ID)) {
4096        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4097        mCurrentRequestId = request_id;
4098        LOGD("Received request with id: %d", request_id);
4099    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4100        LOGE("Unable to find request id field, \
4101                & no previous id available");
4102        pthread_mutex_unlock(&mMutex);
4103        return NAME_NOT_FOUND;
4104    } else {
4105        LOGD("Re-using old request id");
4106        request_id = mCurrentRequestId;
4107    }
4108
4109    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4110                                    request->num_output_buffers,
4111                                    request->input_buffer,
4112                                    frameNumber);
4113    // Acquire all request buffers first
4114    streamsArray.num_streams = 0;
4115    int blob_request = 0;
4116    uint32_t snapshotStreamId = 0;
4117    for (size_t i = 0; i < request->num_output_buffers; i++) {
4118        const camera3_stream_buffer_t& output = request->output_buffers[i];
4119        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4120
4121        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4122            //Call function to store local copy of jpeg data for encode params.
4123            blob_request = 1;
4124            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4125        }
4126
4127        if (output.acquire_fence != -1) {
4128           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4129           close(output.acquire_fence);
4130           if (rc != OK) {
4131              LOGE("sync wait failed %d", rc);
4132              pthread_mutex_unlock(&mMutex);
4133              return rc;
4134           }
4135        }
4136
4137        streamsArray.stream_request[streamsArray.num_streams++].streamID =
4138            channel->getStreamID(channel->getStreamTypeMask());
4139
4140        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4141            isVidBufRequested = true;
4142        }
4143    }
4144
4145    if (blob_request) {
4146        KPI_ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
4147    }
4148    if (blob_request && mRawDumpChannel) {
4149        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
4150        streamsArray.stream_request[streamsArray.num_streams].streamID =
4151            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
4152        streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4153    }
4154
4155    if(request->input_buffer == NULL) {
4156        /* Parse the settings:
4157         * - For every request in NORMAL MODE
4158         * - For every request in HFR mode during preview only case
4159         * - For first request of every batch in HFR mode during video
4160         * recording. In batchmode the same settings except frame number is
4161         * repeated in each request of the batch.
4162         */
4163        if (!mBatchSize ||
4164           (mBatchSize && !isVidBufRequested) ||
4165           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
4166            rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4167            if (rc < 0) {
4168                LOGE("fail to set frame parameters");
4169                pthread_mutex_unlock(&mMutex);
4170                return rc;
4171            }
4172        }
4173        /* For batchMode HFR, setFrameParameters is not called for every
4174         * request. But only frame number of the latest request is parsed.
4175         * Keep track of first and last frame numbers in a batch so that
4176         * metadata for the frame numbers of batch can be duplicated in
4177         * handleBatchMetadta */
4178        if (mBatchSize) {
4179            if (!mToBeQueuedVidBufs) {
4180                //start of the batch
4181                mFirstFrameNumberInBatch = request->frame_number;
4182            }
4183            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4184                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4185                LOGE("Failed to set the frame number in the parameters");
4186                return BAD_VALUE;
4187            }
4188        }
4189        if (mNeedSensorRestart) {
4190            /* Unlock the mutex as restartSensor waits on the channels to be
4191             * stopped, which in turn calls stream callback functions -
4192             * handleBufferWithLock and handleMetadataWithLock */
4193            pthread_mutex_unlock(&mMutex);
4194            rc = dynamicUpdateMetaStreamInfo();
4195            if (rc != NO_ERROR) {
4196                LOGE("Restarting the sensor failed");
4197                return BAD_VALUE;
4198            }
4199            mNeedSensorRestart = false;
4200            pthread_mutex_lock(&mMutex);
4201        }
4202    } else {
4203
4204        if (request->input_buffer->acquire_fence != -1) {
4205           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4206           close(request->input_buffer->acquire_fence);
4207           if (rc != OK) {
4208              LOGE("input buffer sync wait failed %d", rc);
4209              pthread_mutex_unlock(&mMutex);
4210              return rc;
4211           }
4212        }
4213    }
4214
4215    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4216        mLastCustIntentFrmNum = frameNumber;
4217    }
4218    /* Update pending request list and pending buffers map */
4219    PendingRequestInfo pendingRequest;
4220    pendingRequestIterator latestRequest;
4221    pendingRequest.frame_number = frameNumber;
4222    pendingRequest.num_buffers = request->num_output_buffers;
4223    pendingRequest.request_id = request_id;
4224    pendingRequest.blob_request = blob_request;
4225    pendingRequest.timestamp = 0;
4226    pendingRequest.bUrgentReceived = 0;
4227    if (request->input_buffer) {
4228        pendingRequest.input_buffer =
4229                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4230        *(pendingRequest.input_buffer) = *(request->input_buffer);
4231        pInputBuffer = pendingRequest.input_buffer;
4232    } else {
4233       pendingRequest.input_buffer = NULL;
4234       pInputBuffer = NULL;
4235    }
4236
4237    pendingRequest.pipeline_depth = 0;
4238    pendingRequest.partial_result_cnt = 0;
4239    extractJpegMetadata(mCurJpegMeta, request);
4240    pendingRequest.jpegMetadata = mCurJpegMeta;
4241    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4242    pendingRequest.shutter_notified = false;
4243
4244    //extract capture intent
4245    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4246        mCaptureIntent =
4247                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4248    }
4249    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
4250        mHybridAeEnable =
4251                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
4252    }
4253    pendingRequest.capture_intent = mCaptureIntent;
4254    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
4255    /* DevCamDebug metadata processCaptureRequest */
4256    if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4257        mDevCamDebugMetaEnable =
4258                meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4259    }
4260    pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4261    /* DevCamDebug metadata end */
4262
4263    //extract CAC info
4264    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4265        mCacMode =
4266                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4267    }
4268    pendingRequest.fwkCacMode = mCacMode;
4269
4270    PendingBuffersInRequest bufsForCurRequest;
4271    bufsForCurRequest.frame_number = frameNumber;
4272    // Mark current timestamp for the new request
4273    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4274
4275    for (size_t i = 0; i < request->num_output_buffers; i++) {
4276        RequestedBufferInfo requestedBuf;
4277        memset(&requestedBuf, 0, sizeof(requestedBuf));
4278        requestedBuf.stream = request->output_buffers[i].stream;
4279        requestedBuf.buffer = NULL;
4280        pendingRequest.buffers.push_back(requestedBuf);
4281
4282        // Add to buffer handle the pending buffers list
4283        PendingBufferInfo bufferInfo;
4284        bufferInfo.buffer = request->output_buffers[i].buffer;
4285        bufferInfo.stream = request->output_buffers[i].stream;
4286        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4287        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4288        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4289            frameNumber, bufferInfo.buffer,
4290            channel->getStreamTypeMask(), bufferInfo.stream->format);
4291    }
4292    // Add this request packet into mPendingBuffersMap
4293    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4294    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4295        mPendingBuffersMap.get_num_overall_buffers());
4296
4297    latestRequest = mPendingRequestsList.insert(
4298            mPendingRequestsList.end(), pendingRequest);
4299    if(mFlush) {
4300        LOGI("mFlush is true");
4301        pthread_mutex_unlock(&mMutex);
4302        return NO_ERROR;
4303    }
4304
4305    int indexUsed;
4306    // Notify metadata channel we receive a request
4307    mMetadataChannel->request(NULL, frameNumber, indexUsed);
4308
4309    if(request->input_buffer != NULL){
4310        LOGD("Input request, frame_number %d", frameNumber);
4311        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4312        if (NO_ERROR != rc) {
4313            LOGE("fail to set reproc parameters");
4314            pthread_mutex_unlock(&mMutex);
4315            return rc;
4316        }
4317    }
4318
4319    // Call request on other streams
4320    uint32_t streams_need_metadata = 0;
4321    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4322    for (size_t i = 0; i < request->num_output_buffers; i++) {
4323        const camera3_stream_buffer_t& output = request->output_buffers[i];
4324        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4325
4326        if (channel == NULL) {
4327            LOGW("invalid channel pointer for stream");
4328            continue;
4329        }
4330
4331        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4332            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4333                      output.buffer, request->input_buffer, frameNumber);
4334            if(request->input_buffer != NULL){
4335                rc = channel->request(output.buffer, frameNumber,
4336                        pInputBuffer, &mReprocMeta, indexUsed);
4337                if (rc < 0) {
4338                    LOGE("Fail to request on picture channel");
4339                    pthread_mutex_unlock(&mMutex);
4340                    return rc;
4341                }
4342            } else {
4343                LOGD("snapshot request with buffer %p, frame_number %d",
4344                         output.buffer, frameNumber);
4345                if (!request->settings) {
4346                    rc = channel->request(output.buffer, frameNumber,
4347                            NULL, mPrevParameters, indexUsed);
4348                } else {
4349                    rc = channel->request(output.buffer, frameNumber,
4350                            NULL, mParameters, indexUsed);
4351                }
4352                if (rc < 0) {
4353                    LOGE("Fail to request on picture channel");
4354                    pthread_mutex_unlock(&mMutex);
4355                    return rc;
4356                }
4357
4358                uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4359                uint32_t j = 0;
4360                for (j = 0; j < streamsArray.num_streams; j++) {
4361                    if (streamsArray.stream_request[j].streamID == streamId) {
4362                      if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4363                          streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4364                      else
4365                          streamsArray.stream_request[j].buf_index = indexUsed;
4366                        break;
4367                    }
4368                }
4369                if (j == streamsArray.num_streams) {
4370                    LOGE("Did not find matching stream to update index");
4371                    assert(0);
4372                }
4373
4374                pendingBufferIter->need_metadata = true;
4375                streams_need_metadata++;
4376            }
4377        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4378            bool needMetadata = false;
4379            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4380            rc = yuvChannel->request(output.buffer, frameNumber,
4381                    pInputBuffer,
4382                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
4383            if (rc < 0) {
4384                LOGE("Fail to request on YUV channel");
4385                pthread_mutex_unlock(&mMutex);
4386                return rc;
4387            }
4388
4389            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4390            uint32_t j = 0;
4391            for (j = 0; j < streamsArray.num_streams; j++) {
4392                if (streamsArray.stream_request[j].streamID == streamId) {
4393                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4394                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4395                    else
4396                        streamsArray.stream_request[j].buf_index = indexUsed;
4397                    break;
4398                }
4399            }
4400            if (j == streamsArray.num_streams) {
4401                LOGE("Did not find matching stream to update index");
4402                assert(0);
4403            }
4404
4405            pendingBufferIter->need_metadata = needMetadata;
4406            if (needMetadata)
4407                streams_need_metadata += 1;
4408            LOGD("calling YUV channel request, need_metadata is %d",
4409                     needMetadata);
4410        } else {
4411            LOGD("request with buffer %p, frame_number %d",
4412                  output.buffer, frameNumber);
4413
4414            rc = channel->request(output.buffer, frameNumber, indexUsed);
4415
4416            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4417            uint32_t j = 0;
4418            for (j = 0; j < streamsArray.num_streams; j++) {
4419                if (streamsArray.stream_request[j].streamID == streamId) {
4420                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4421                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4422                    else
4423                        streamsArray.stream_request[j].buf_index = indexUsed;
4424                    break;
4425                }
4426            }
4427            if (j == streamsArray.num_streams) {
4428                LOGE("Did not find matching stream to update index");
4429                assert(0);
4430            }
4431
4432            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4433                    && mBatchSize) {
4434                mToBeQueuedVidBufs++;
4435                if (mToBeQueuedVidBufs == mBatchSize) {
4436                    channel->queueBatchBuf();
4437                }
4438            }
4439            if (rc < 0) {
4440                LOGE("request failed");
4441                pthread_mutex_unlock(&mMutex);
4442                return rc;
4443            }
4444        }
4445        pendingBufferIter++;
4446    }
4447
4448    //If 2 streams have need_metadata set to true, fail the request, unless
4449    //we copy/reference count the metadata buffer
4450    if (streams_need_metadata > 1) {
4451        LOGE("not supporting request in which two streams requires"
4452                " 2 HAL metadata for reprocessing");
4453        pthread_mutex_unlock(&mMutex);
4454        return -EINVAL;
4455    }
4456
4457    if (request->input_buffer == NULL) {
4458        /* Set the parameters to backend:
4459         * - For every request in NORMAL MODE
4460         * - For every request in HFR mode during preview only case
4461         * - Once every batch in HFR mode during video recording
4462         */
4463        if (!mBatchSize ||
4464           (mBatchSize && !isVidBufRequested) ||
4465           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4466            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4467                     mBatchSize, isVidBufRequested,
4468                    mToBeQueuedVidBufs);
4469
4470            if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4471                for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4472                    uint32_t m = 0;
4473                    for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4474                        if (streamsArray.stream_request[k].streamID ==
4475                                mBatchedStreamsArray.stream_request[m].streamID)
4476                            break;
4477                        }
4478                        if (m == mBatchedStreamsArray.num_streams) {
4479                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4480                                streamsArray.stream_request[k].streamID;
4481                            mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4482                                streamsArray.stream_request[k].buf_index;
4483                            mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4484                        }
4485                }
4486                streamsArray = mBatchedStreamsArray;
4487            }
4488            /* Update stream id of all the requested buffers */
4489            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4490                LOGE("Failed to set stream type mask in the parameters");
4491                return BAD_VALUE;
4492            }
4493
4494            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4495                    mParameters);
4496            if (rc < 0) {
4497                LOGE("set_parms failed");
4498            }
4499            /* reset to zero coz, the batch is queued */
4500            mToBeQueuedVidBufs = 0;
4501            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4502            memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4503        } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4504            for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4505                uint32_t m = 0;
4506                for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4507                    if (streamsArray.stream_request[k].streamID ==
4508                            mBatchedStreamsArray.stream_request[m].streamID)
4509                        break;
4510                }
4511                if (m == mBatchedStreamsArray.num_streams) {
4512                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4513                        streamsArray.stream_request[k].streamID;
4514                    mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4515                        streamsArray.stream_request[k].buf_index;
4516                    mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4517                }
4518            }
4519        }
4520        mPendingLiveRequest++;
4521    }
4522
4523    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4524
4525    mState = STARTED;
4526    // Added a timed condition wait
4527    struct timespec ts;
4528    uint8_t isValidTimeout = 1;
4529    rc = clock_gettime(CLOCK_MONOTONIC, &ts);
4530    if (rc < 0) {
4531      isValidTimeout = 0;
4532      LOGE("Error reading the real time clock!!");
4533    }
4534    else {
4535      // Make timeout as 5 sec for request to be honored
4536      ts.tv_sec += 5;
4537    }
4538    //Block on conditional variable
4539    while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4540            (mState != ERROR) && (mState != DEINIT)) {
4541        if (!isValidTimeout) {
4542            LOGD("Blocking on conditional wait");
4543            pthread_cond_wait(&mRequestCond, &mMutex);
4544        }
4545        else {
4546            LOGD("Blocking on timed conditional wait");
4547            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4548            if (rc == ETIMEDOUT) {
4549                rc = -ENODEV;
4550                LOGE("Unblocked on timeout!!!!");
4551                break;
4552            }
4553        }
4554        LOGD("Unblocked");
4555        if (mWokenUpByDaemon) {
4556            mWokenUpByDaemon = false;
4557            if (mPendingLiveRequest < mMaxInFlightRequests)
4558                break;
4559        }
4560    }
4561    pthread_mutex_unlock(&mMutex);
4562
4563    return rc;
4564}
4565
4566/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dumps current HAL3 state — the pending capture request list,
 *              the pending buffer map and the pending frame drop list — to
 *              the given file descriptor (triggered via dumpsys).
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump output to
 *
 * RETURN     : None
4575 *==========================================================================*/
4576void QCamera3HardwareInterface::dump(int fd)
4577{
4578    pthread_mutex_lock(&mMutex);
4579    dprintf(fd, "\n Camera HAL3 information Begin \n");
4580
4581    dprintf(fd, "\nNumber of pending requests: %zu \n",
4582        mPendingRequestsList.size());
4583    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4584    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4585    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4586    for(pendingRequestIterator i = mPendingRequestsList.begin();
4587            i != mPendingRequestsList.end(); i++) {
4588        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4589        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4590        i->input_buffer);
4591    }
4592    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4593                mPendingBuffersMap.get_num_overall_buffers());
4594    dprintf(fd, "-------+------------------\n");
4595    dprintf(fd, " Frame | Stream type mask \n");
4596    dprintf(fd, "-------+------------------\n");
4597    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4598        for(auto &j : req.mPendingBufferList) {
4599            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4600            dprintf(fd, " %5d | %11d \n",
4601                    req.frame_number, channel->getStreamTypeMask());
4602        }
4603    }
4604    dprintf(fd, "-------+------------------\n");
4605
4606    dprintf(fd, "\nPending frame drop list: %zu\n",
4607        mPendingFrameDropList.size());
4608    dprintf(fd, "-------+-----------\n");
4609    dprintf(fd, " Frame | Stream ID \n");
4610    dprintf(fd, "-------+-----------\n");
4611    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4612        i != mPendingFrameDropList.end(); i++) {
4613        dprintf(fd, " %5d | %9d \n",
4614            i->frame_number, i->stream_ID);
4615    }
4616    dprintf(fd, "-------+-----------\n");
4617
4618    dprintf(fd, "\n Camera HAL3 information End \n");
4619
4620    /* use dumpsys media.camera as trigger to send update debug level event */
4621    mUpdateDebugLevel = true;
4622    pthread_mutex_unlock(&mMutex);
4623    return;
4624}
4625
4626/*===========================================================================
4627 * FUNCTION   : flush
4628 *
4629 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4630 *              conditionally restarts channels
4631 *
4632 * PARAMETERS :
4633 *  @ restartChannels: re-start all channels
4634 *
4635 *
4636 * RETURN     :
4637 *          0 on success
4638 *          Error code on failure
4639 *==========================================================================*/
4640int QCamera3HardwareInterface::flush(bool restartChannels)
4641{
4642    KPI_ATRACE_CALL();
4643    int32_t rc = NO_ERROR;
4644
4645    LOGD("Unblocking Process Capture Request");
4646    pthread_mutex_lock(&mMutex);
4647    mFlush = true;
4648    pthread_mutex_unlock(&mMutex);
4649
4650    rc = stopAllChannels();
4651    // unlink of dualcam
4652    if (mIsDeviceLinked) {
4653        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4654        pthread_mutex_lock(&gCamLock);
4655
4656        if (mIsMainCamera == 1) {
4657            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4658            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4659            // related session id should be session id of linked session
4660            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4661        } else {
4662            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4663            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4664            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4665        }
4666        pthread_mutex_unlock(&gCamLock);
4667
4668        rc = mCameraHandle->ops->sync_related_sensors(
4669                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4670        if (rc < 0) {
4671            LOGE("Dualcam: Unlink failed, but still proceed to close");
4672        }
4673    }
4674
4675    if (rc < 0) {
4676        LOGE("stopAllChannels failed");
4677        return rc;
4678    }
4679    if (mChannelHandle) {
4680        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4681                mChannelHandle);
4682    }
4683
4684    // Reset bundle info
4685    rc = setBundleInfo();
4686    if (rc < 0) {
4687        LOGE("setBundleInfo failed %d", rc);
4688        return rc;
4689    }
4690
4691    // Mutex Lock
4692    pthread_mutex_lock(&mMutex);
4693
4694    // Unblock process_capture_request
4695    mPendingLiveRequest = 0;
4696    pthread_cond_signal(&mRequestCond);
4697
4698    rc = notifyErrorForPendingRequests();
4699    if (rc < 0) {
4700        LOGE("notifyErrorForPendingRequests failed");
4701        pthread_mutex_unlock(&mMutex);
4702        return rc;
4703    }
4704
4705    mFlush = false;
4706
4707    // Start the Streams/Channels
4708    if (restartChannels) {
4709        rc = startAllChannels();
4710        if (rc < 0) {
4711            LOGE("startAllChannels failed");
4712            pthread_mutex_unlock(&mMutex);
4713            return rc;
4714        }
4715    }
4716
4717    if (mChannelHandle) {
4718        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4719                    mChannelHandle);
4720        if (rc < 0) {
4721            LOGE("start_channel failed");
4722            pthread_mutex_unlock(&mMutex);
4723            return rc;
4724        }
4725    }
4726
4727    pthread_mutex_unlock(&mMutex);
4728
4729    return 0;
4730}
4731
4732/*===========================================================================
4733 * FUNCTION   : flushPerf
4734 *
4735 * DESCRIPTION: This is the performance optimization version of flush that does
4736 *              not use stream off, rather flushes the system
4737 *
 * PARAMETERS : None
 *
4740 *
4741 * RETURN     : 0 : success
4742 *              -EINVAL: input is malformed (device is not valid)
4743 *              -ENODEV: if the device has encountered a serious error
4744 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot how many buffers the HAL currently owes the framework; the
    // flush is complete once all of them have been returned.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding, so there is nothing to wait for.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // NOTE: uses CLOCK_MONOTONIC (the clock mBuffersCond is expected to be
    // configured with), despite the "real time clock" wording in the log.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // numPendingBufsAtFlush is decremented elsewhere as buffers come back;
    // mBuffersCond is signalled when it changes. mMutex is released while
    // waiting and re-acquired on wakeup.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            // rc is ETIMEDOUT (or another error code) on failure, 0 on wakeup.
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out: report the device as unusable.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4845
4846/*===========================================================================
4847 * FUNCTION   : handleCameraDeviceError
4848 *
4849 * DESCRIPTION: This function calls internal flush and notifies the error to
4850 *              framework and updates the state variable.
4851 *
4852 * PARAMETERS : None
4853 *
4854 * RETURN     : NO_ERROR on Success
4855 *              Error code on failure
4856 *==========================================================================*/
4857int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4858{
4859    int32_t rc = NO_ERROR;
4860
4861    pthread_mutex_lock(&mMutex);
4862    if (mState != ERROR) {
4863        //if mState != ERROR, nothing to be done
4864        pthread_mutex_unlock(&mMutex);
4865        return NO_ERROR;
4866    }
4867    pthread_mutex_unlock(&mMutex);
4868
4869    rc = flush(false /* restart channels */);
4870    if (NO_ERROR != rc) {
4871        LOGE("internal flush to handle mState = ERROR failed");
4872    }
4873
4874    pthread_mutex_lock(&mMutex);
4875    mState = DEINIT;
4876    pthread_mutex_unlock(&mMutex);
4877
4878    camera3_notify_msg_t notify_msg;
4879    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4880    notify_msg.type = CAMERA3_MSG_ERROR;
4881    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4882    notify_msg.message.error.error_stream = NULL;
4883    notify_msg.message.error.frame_number = 0;
4884    mCallbackOps->notify(mCallbackOps, &notify_msg);
4885
4886    return rc;
4887}
4888
4889/*===========================================================================
4890 * FUNCTION   : captureResultCb
4891 *
4892 * DESCRIPTION: Callback handler for all capture result
4893 *              (streams, as well as metadata)
4894 *
4895 * PARAMETERS :
4896 *   @metadata : metadata information
4897 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4898 *               NULL if metadata.
4899 *
4900 * RETURN     : NONE
4901 *==========================================================================*/
4902void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4903                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4904{
4905    if (metadata_buf) {
4906        pthread_mutex_lock(&mMutex);
4907        uint8_t batchSize = mBatchSize;
4908        pthread_mutex_unlock(&mMutex);
4909        if (batchSize) {
4910            handleBatchMetadata(metadata_buf,
4911                    true /* free_and_bufdone_meta_buf */);
4912        } else { /* mBatchSize = 0 */
4913            hdrPlusPerfLock(metadata_buf);
4914            pthread_mutex_lock(&mMutex);
4915            handleMetadataWithLock(metadata_buf,
4916                    true /* free_and_bufdone_meta_buf */,
4917                    true /* last urgent frame of batch metadata */,
4918                    true /* last frame of batch metadata */ );
4919            pthread_mutex_unlock(&mMutex);
4920        }
4921    } else if (isInputBuffer) {
4922        pthread_mutex_lock(&mMutex);
4923        handleInputBufferWithLock(frame_number);
4924        pthread_mutex_unlock(&mMutex);
4925    } else {
4926        pthread_mutex_lock(&mMutex);
4927        handleBufferWithLock(buffer, frame_number);
4928        pthread_mutex_unlock(&mMutex);
4929    }
4930    return;
4931}
4932
4933/*===========================================================================
4934 * FUNCTION   : getReprocessibleOutputStreamId
4935 *
4936 * DESCRIPTION: Get source output stream id for the input reprocess stream
4937 *              based on size and format, which would be the largest
4938 *              output stream if an input stream exists.
4939 *
4940 * PARAMETERS :
4941 *   @id      : return the stream id if found
4942 *
4943 * RETURN     : int32_t type of status
4944 *              NO_ERROR  -- success
4945 *              none-zero failure code
4946 *==========================================================================*/
4947int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4948{
4949    /* check if any output or bidirectional stream with the same size and format
4950       and return that stream */
4951    if ((mInputStreamInfo.dim.width > 0) &&
4952            (mInputStreamInfo.dim.height > 0)) {
4953        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4954                it != mStreamInfo.end(); it++) {
4955
4956            camera3_stream_t *stream = (*it)->stream;
4957            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4958                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4959                    (stream->format == mInputStreamInfo.format)) {
4960                // Usage flag for an input stream and the source output stream
4961                // may be different.
4962                LOGD("Found reprocessible output stream! %p", *it);
4963                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4964                         stream->usage, mInputStreamInfo.usage);
4965
4966                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4967                if (channel != NULL && channel->mStreams[0]) {
4968                    id = channel->mStreams[0]->getMyServerID();
4969                    return NO_ERROR;
4970                }
4971            }
4972        }
4973    } else {
4974        LOGD("No input stream, so no reprocessible output stream");
4975    }
4976    return NAME_NOT_FOUND;
4977}
4978
4979/*===========================================================================
4980 * FUNCTION   : lookupFwkName
4981 *
4982 * DESCRIPTION: In case the enum is not same in fwk and backend
4983 *              make sure the parameter is correctly propogated
4984 *
4985 * PARAMETERS  :
4986 *   @arr      : map between the two enums
4987 *   @len      : len of the map
4988 *   @hal_name : name of the hal_parm to map
4989 *
4990 * RETURN     : int type of status
4991 *              fwk_name  -- success
4992 *              none-zero failure code
4993 *==========================================================================*/
4994template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4995        size_t len, halType hal_name)
4996{
4997
4998    for (size_t i = 0; i < len; i++) {
4999        if (arr[i].hal_name == hal_name) {
5000            return arr[i].fwk_name;
5001        }
5002    }
5003
5004    /* Not able to find matching framework type is not necessarily
5005     * an error case. This happens when mm-camera supports more attributes
5006     * than the frameworks do */
5007    LOGH("Cannot find matching framework type");
5008    return NAME_NOT_FOUND;
5009}
5010
5011/*===========================================================================
5012 * FUNCTION   : lookupHalName
5013 *
5014 * DESCRIPTION: In case the enum is not same in fwk and backend
5015 *              make sure the parameter is correctly propogated
5016 *
5017 * PARAMETERS  :
5018 *   @arr      : map between the two enums
5019 *   @len      : len of the map
5020 *   @fwk_name : name of the hal_parm to map
5021 *
5022 * RETURN     : int32_t type of status
5023 *              hal_name  -- success
5024 *              none-zero failure code
5025 *==========================================================================*/
5026template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5027        size_t len, fwkType fwk_name)
5028{
5029    for (size_t i = 0; i < len; i++) {
5030        if (arr[i].fwk_name == fwk_name) {
5031            return arr[i].hal_name;
5032        }
5033    }
5034
5035    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5036    return NAME_NOT_FOUND;
5037}
5038
5039/*===========================================================================
5040 * FUNCTION   : lookupProp
5041 *
5042 * DESCRIPTION: lookup a value by its name
5043 *
5044 * PARAMETERS :
5045 *   @arr     : map between the two enums
5046 *   @len     : size of the map
5047 *   @name    : name to be looked up
5048 *
5049 * RETURN     : Value if found
5050 *              CAM_CDS_MODE_MAX if not found
5051 *==========================================================================*/
5052template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5053        size_t len, const char *name)
5054{
5055    if (name) {
5056        for (size_t i = 0; i < len; i++) {
5057            if (!strcmp(arr[i].desc, name)) {
5058                return arr[i].val;
5059            }
5060        }
5061    }
5062    return CAM_CDS_MODE_MAX;
5063}
5064
5065/*===========================================================================
5066 *
5067 * DESCRIPTION:
5068 *
5069 * PARAMETERS :
5070 *   @metadata : metadata information from callback
5071 *   @timestamp: metadata buffer timestamp
5072 *   @request_id: request id
5073 *   @jpegMetadata: additional jpeg metadata
5074 *   @hybrid_ae_enable: whether hybrid ae is enabled
5075 *   // DevCamDebug metadata
5076 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
5077 *   // DevCamDebug metadata end
5078 *   @pprocDone: whether internal offline postprocsesing is done
5079 *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
5080 *                         in a batch. Always true for non-batch mode.
5081 *
5082 * RETURN     : camera_metadata_t*
5083 *              metadata in a format specified by fwk
5084 *==========================================================================*/
5085camera_metadata_t*
5086QCamera3HardwareInterface::translateFromHalMetadata(
5087                                 metadata_buffer_t *metadata,
5088                                 nsecs_t timestamp,
5089                                 int32_t request_id,
5090                                 const CameraMetadata& jpegMetadata,
5091                                 uint8_t pipeline_depth,
5092                                 uint8_t capture_intent,
5093                                 uint8_t hybrid_ae_enable,
5094                                 /* DevCamDebug metadata translateFromHalMetadata argument */
5095                                 uint8_t DevCamDebug_meta_enable,
5096                                 /* DevCamDebug metadata end */
5097                                 bool pprocDone,
5098                                 uint8_t fwk_cacMode,
5099                                 bool lastMetadataInBatch)
5100{
5101    CameraMetadata camMetadata;
5102    camera_metadata_t *resultMetadata;
5103
5104    if (!lastMetadataInBatch) {
5105        /* In batch mode, use empty metadata if this is not the last in batch*/
5106        resultMetadata = allocate_camera_metadata(0, 0);
5107        return resultMetadata;
5108    }
5109
5110    if (jpegMetadata.entryCount())
5111        camMetadata.append(jpegMetadata);
5112
5113    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5114    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5115    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5116    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
5117    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
5118    if (mBatchSize == 0) {
5119        // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5120        camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5121    }
5122
5123    // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5124    // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5125    if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5126        // DevCamDebug metadata translateFromHalMetadata AF
5127        IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5128                CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5129            int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5130            camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5131        }
5132        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5133                CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5134            int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5135            camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5136        }
5137        IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5138                CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5139            int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5140            camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5141        }
5142        IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5143                CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5144            int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5145            camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5146        }
5147        IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5148                CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5149            int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5150            camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5151        }
5152        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5153                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5154            int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5155                *DevCamDebug_af_monitor_pdaf_target_pos;
5156            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5157                &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5158        }
5159        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5160                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5161            int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5162                *DevCamDebug_af_monitor_pdaf_confidence;
5163            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5164                &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5165        }
5166        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5167                CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5168            int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5169            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5170                &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5171        }
5172        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5173                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5174            int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5175                *DevCamDebug_af_monitor_tof_target_pos;
5176            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5177                &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5178        }
5179        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5180                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5181            int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5182                *DevCamDebug_af_monitor_tof_confidence;
5183            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5184                &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5185        }
5186        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5187                CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5188            int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5189            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5190                &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5191        }
5192        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5193                CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5194            int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5195            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5196                &fwk_DevCamDebug_af_monitor_type_select, 1);
5197        }
5198        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5199                CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5200            int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5201            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5202                &fwk_DevCamDebug_af_monitor_refocus, 1);
5203        }
5204        IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5205                CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5206            int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5207            camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5208                &fwk_DevCamDebug_af_monitor_target_pos, 1);
5209        }
5210        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5211                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5212            int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5213                *DevCamDebug_af_search_pdaf_target_pos;
5214            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5215                &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5216        }
5217        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5218                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5219            int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5220            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5221                &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5222        }
5223        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5224                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5225            int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5226            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5227                &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5228        }
5229        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5230                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5231            int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5232            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5233                &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5234        }
5235        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5236                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5237            int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5238            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5239                &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5240        }
5241        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5242                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5243            int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5244                *DevCamDebug_af_search_tof_target_pos;
5245            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5246                &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5247        }
5248        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5249                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5250            int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5251            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5252                &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5253        }
5254        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5255                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5256            int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5257            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5258                &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5259        }
5260        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5261                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5262            int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5263            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5264                &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5265        }
5266        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5267                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5268            int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5269            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5270                &fwk_DevCamDebug_af_search_tof_confidence, 1);
5271        }
5272        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5273                CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5274            int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5275            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5276                &fwk_DevCamDebug_af_search_type_select, 1);
5277        }
5278        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5279                CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5280            int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5281            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5282                &fwk_DevCamDebug_af_search_next_pos, 1);
5283        }
5284        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5285                CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5286            int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5287            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5288                &fwk_DevCamDebug_af_search_target_pos, 1);
5289        }
5290        // DevCamDebug metadata translateFromHalMetadata AEC
5291        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5292                CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5293            int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5294            camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5295    }
5296        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5297                CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5298            int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5299            camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5300        }
5301        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5302                CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5303            int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5304            camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5305        }
5306        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5307                CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5308            int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5309            camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5310        }
5311        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5312                CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5313            int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5314            camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5315        }
5316        IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5317                CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5318            float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5319            camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5320        }
5321        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5322                CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5323            int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5324            camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5325        }
5326        IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5327                CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5328            float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5329            camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5330        }
5331        // DevCamDebug metadata translateFromHalMetadata AWB
5332        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5333                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5334            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5335            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5336        }
5337        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5338                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5339            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5340            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5341        }
5342        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5343                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5344            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5345            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5346        }
5347        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5348                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5349            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5350            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5351        }
5352        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5353                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5354            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5355            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5356        }
5357    }
5358    // atrace_end(ATRACE_TAG_ALWAYS);
5359
5360    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5361        int64_t fwk_frame_number = *frame_number;
5362        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5363    }
5364
5365    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5366        int32_t fps_range[2];
5367        fps_range[0] = (int32_t)float_range->min_fps;
5368        fps_range[1] = (int32_t)float_range->max_fps;
5369        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5370                                      fps_range, 2);
5371        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5372             fps_range[0], fps_range[1]);
5373    }
5374
5375    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5376        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5377    }
5378
5379    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5380        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5381                METADATA_MAP_SIZE(SCENE_MODES_MAP),
5382                *sceneMode);
5383        if (NAME_NOT_FOUND != val) {
5384            uint8_t fwkSceneMode = (uint8_t)val;
5385            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5386            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5387                     fwkSceneMode);
5388        }
5389    }
5390
5391    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5392        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5393        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5394    }
5395
5396    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5397        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5398        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5399    }
5400
5401    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5402        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5403        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5404    }
5405
5406    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5407            CAM_INTF_META_EDGE_MODE, metadata) {
5408        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5409    }
5410
5411    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5412        uint8_t fwk_flashPower = (uint8_t) *flashPower;
5413        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5414    }
5415
5416    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5417        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5418    }
5419
5420    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5421        if (0 <= *flashState) {
5422            uint8_t fwk_flashState = (uint8_t) *flashState;
5423            if (!gCamCapability[mCameraId]->flash_available) {
5424                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5425            }
5426            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5427        }
5428    }
5429
5430    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5431        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5432        if (NAME_NOT_FOUND != val) {
5433            uint8_t fwk_flashMode = (uint8_t)val;
5434            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5435        }
5436    }
5437
5438    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5439        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5440        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5441    }
5442
5443    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5444        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5445    }
5446
5447    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5448        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5449    }
5450
5451    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5452        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5453    }
5454
5455    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5456        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5457        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5458    }
5459
5460    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5461        uint8_t fwk_videoStab = (uint8_t) *videoStab;
5462        LOGD("fwk_videoStab = %d", fwk_videoStab);
5463        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5464    } else {
5465        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5466        // and so hardcoding the Video Stab result to OFF mode.
5467        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5468        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
5469        LOGD("%s: EIS result default to OFF mode", __func__);
5470    }
5471
5472    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5473        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5474        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5475    }
5476
5477    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5478        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5479    }
5480
5481    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5482        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5483        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
5484
5485        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
5486              gCamCapability[mCameraId]->color_arrangement);
5487
5488        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
5489          blackLevelAppliedPattern->cam_black_level[0],
5490          blackLevelAppliedPattern->cam_black_level[1],
5491          blackLevelAppliedPattern->cam_black_level[2],
5492          blackLevelAppliedPattern->cam_black_level[3]);
5493        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
5494                BLACK_LEVEL_PATTERN_CNT);
5495
5496        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
5497        // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
5498        // depth space.
5499        fwk_blackLevelInd[0] /= 4.0;
5500        fwk_blackLevelInd[1] /= 4.0;
5501        fwk_blackLevelInd[2] /= 4.0;
5502        fwk_blackLevelInd[3] /= 4.0;
5503        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
5504                BLACK_LEVEL_PATTERN_CNT);
5505    }
5506
5507    // Fixed whitelevel is used by ISP/Sensor
5508    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5509            &gCamCapability[mCameraId]->white_level, 1);
5510
5511    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5512            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5513        int32_t scalerCropRegion[4];
5514        scalerCropRegion[0] = hScalerCropRegion->left;
5515        scalerCropRegion[1] = hScalerCropRegion->top;
5516        scalerCropRegion[2] = hScalerCropRegion->width;
5517        scalerCropRegion[3] = hScalerCropRegion->height;
5518
5519        // Adjust crop region from sensor output coordinate system to active
5520        // array coordinate system.
5521        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5522                scalerCropRegion[2], scalerCropRegion[3]);
5523
5524        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5525    }
5526
5527    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5528        LOGD("sensorExpTime = %lld", *sensorExpTime);
5529        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5530    }
5531
5532    IF_META_AVAILABLE(int64_t, sensorFameDuration,
5533            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5534        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5535        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5536    }
5537
5538    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5539            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5540        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5541        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5542                sensorRollingShutterSkew, 1);
5543    }
5544
5545    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5546        LOGD("sensorSensitivity = %d", *sensorSensitivity);
5547        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5548
5549        //calculate the noise profile based on sensitivity
5550        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5551        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5552        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5553        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5554            noise_profile[i]   = noise_profile_S;
5555            noise_profile[i+1] = noise_profile_O;
5556        }
5557        LOGD("noise model entry (S, O) is (%f, %f)",
5558                noise_profile_S, noise_profile_O);
5559        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5560                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5561    }
5562
5563    int32_t fwk_ispSensitivity = 100;
5564    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5565        fwk_ispSensitivity = (int32_t) *ispSensitivity;
5566    }
5567    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
5568        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
5569    }
5570    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5571
5572    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5573        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5574        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5575    }
5576
5577    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5578        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5579                *faceDetectMode);
5580        if (NAME_NOT_FOUND != val) {
5581            uint8_t fwk_faceDetectMode = (uint8_t)val;
5582            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5583
5584            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5585                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5586                        CAM_INTF_META_FACE_DETECTION, metadata) {
5587                    uint8_t numFaces = MIN(
5588                            faceDetectionInfo->num_faces_detected, MAX_ROI);
5589                    int32_t faceIds[MAX_ROI];
5590                    uint8_t faceScores[MAX_ROI];
5591                    int32_t faceRectangles[MAX_ROI * 4];
5592                    int32_t faceLandmarks[MAX_ROI * 6];
5593                    size_t j = 0, k = 0;
5594
5595                    for (size_t i = 0; i < numFaces; i++) {
5596                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5597                        // Adjust crop region from sensor output coordinate system to active
5598                        // array coordinate system.
5599                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5600                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
5601                                rect.width, rect.height);
5602
5603                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5604                                faceRectangles+j, -1);
5605
5606                        j+= 4;
5607                    }
5608                    if (numFaces <= 0) {
5609                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5610                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5611                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5612                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5613                    }
5614
5615                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5616                            numFaces);
5617                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5618                            faceRectangles, numFaces * 4U);
5619                    if (fwk_faceDetectMode ==
5620                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5621                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5622                                CAM_INTF_META_FACE_LANDMARK, metadata) {
5623
5624                            for (size_t i = 0; i < numFaces; i++) {
5625                                // Map the co-ordinate sensor output coordinate system to active
5626                                // array coordinate system.
5627                                mCropRegionMapper.toActiveArray(
5628                                        landmarks->face_landmarks[i].left_eye_center.x,
5629                                        landmarks->face_landmarks[i].left_eye_center.y);
5630                                mCropRegionMapper.toActiveArray(
5631                                        landmarks->face_landmarks[i].right_eye_center.x,
5632                                        landmarks->face_landmarks[i].right_eye_center.y);
5633                                mCropRegionMapper.toActiveArray(
5634                                        landmarks->face_landmarks[i].mouth_center.x,
5635                                        landmarks->face_landmarks[i].mouth_center.y);
5636
5637                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5638                                k+= 6;
5639                            }
5640                        }
5641
5642                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5643                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5644                                faceLandmarks, numFaces * 6U);
5645                   }
5646                }
5647            }
5648        }
5649    }
5650
5651    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5652        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5653        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5654    }
5655
5656    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5657            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5658        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5659        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5660    }
5661
5662    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5663            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5664        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5665                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5666    }
5667
5668    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5669            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5670        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5671                CAM_MAX_SHADING_MAP_HEIGHT);
5672        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5673                CAM_MAX_SHADING_MAP_WIDTH);
5674        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5675                lensShadingMap->lens_shading, 4U * map_width * map_height);
5676    }
5677
5678    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5679        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5680        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5681    }
5682
5683    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5684        //Populate CAM_INTF_META_TONEMAP_CURVES
5685        /* ch0 = G, ch 1 = B, ch 2 = R*/
5686        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5687            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5688                     tonemap->tonemap_points_cnt,
5689                    CAM_MAX_TONEMAP_CURVE_SIZE);
5690            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5691        }
5692
5693        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5694                        &tonemap->curves[0].tonemap_points[0][0],
5695                        tonemap->tonemap_points_cnt * 2);
5696
5697        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5698                        &tonemap->curves[1].tonemap_points[0][0],
5699                        tonemap->tonemap_points_cnt * 2);
5700
5701        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5702                        &tonemap->curves[2].tonemap_points[0][0],
5703                        tonemap->tonemap_points_cnt * 2);
5704    }
5705
5706    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5707            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5708        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5709                CC_GAINS_COUNT);
5710    }
5711
5712    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5713            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5714        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5715                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5716                CC_MATRIX_COLS * CC_MATRIX_ROWS);
5717    }
5718
5719    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5720            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5721        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5722            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5723                     toneCurve->tonemap_points_cnt,
5724                    CAM_MAX_TONEMAP_CURVE_SIZE);
5725            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5726        }
5727        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5728                (float*)toneCurve->curve.tonemap_points,
5729                toneCurve->tonemap_points_cnt * 2);
5730    }
5731
5732    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5733            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5734        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5735                predColorCorrectionGains->gains, 4);
5736    }
5737
5738    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5739            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5740        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5741                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5742                CC_MATRIX_ROWS * CC_MATRIX_COLS);
5743    }
5744
5745    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5746        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5747    }
5748
5749    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5750        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5751        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5752    }
5753
5754    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5755        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5756        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5757    }
5758
5759    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5760        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5761                *effectMode);
5762        if (NAME_NOT_FOUND != val) {
5763            uint8_t fwk_effectMode = (uint8_t)val;
5764            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5765        }
5766    }
5767
5768    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5769            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5770        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5771                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5772        if (NAME_NOT_FOUND != fwk_testPatternMode) {
5773            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5774        }
5775        int32_t fwk_testPatternData[4];
5776        fwk_testPatternData[0] = testPatternData->r;
5777        fwk_testPatternData[3] = testPatternData->b;
5778        switch (gCamCapability[mCameraId]->color_arrangement) {
5779        case CAM_FILTER_ARRANGEMENT_RGGB:
5780        case CAM_FILTER_ARRANGEMENT_GRBG:
5781            fwk_testPatternData[1] = testPatternData->gr;
5782            fwk_testPatternData[2] = testPatternData->gb;
5783            break;
5784        case CAM_FILTER_ARRANGEMENT_GBRG:
5785        case CAM_FILTER_ARRANGEMENT_BGGR:
5786            fwk_testPatternData[2] = testPatternData->gr;
5787            fwk_testPatternData[1] = testPatternData->gb;
5788            break;
5789        default:
5790            LOGE("color arrangement %d is not supported",
5791                gCamCapability[mCameraId]->color_arrangement);
5792            break;
5793        }
5794        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5795    }
5796
5797    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5798        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5799    }
5800
5801    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5802        String8 str((const char *)gps_methods);
5803        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5804    }
5805
5806    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5807        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5808    }
5809
5810    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5811        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5812    }
5813
5814    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5815        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5816        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5817    }
5818
5819    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5820        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5821        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5822    }
5823
5824    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5825        int32_t fwk_thumb_size[2];
5826        fwk_thumb_size[0] = thumb_size->width;
5827        fwk_thumb_size[1] = thumb_size->height;
5828        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5829    }
5830
5831    // Skip reprocess metadata for high speed mode.
5832    if (mBatchSize == 0) {
5833        IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5834            camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5835                     privateData,
5836                     MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5837        }
5838    }
5839
5840    if (metadata->is_tuning_params_valid) {
5841        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5842        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5843        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5844
5845
5846        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5847                sizeof(uint32_t));
5848        data += sizeof(uint32_t);
5849
5850        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5851                sizeof(uint32_t));
5852        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5853        data += sizeof(uint32_t);
5854
5855        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5856                sizeof(uint32_t));
5857        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5858        data += sizeof(uint32_t);
5859
5860        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5861                sizeof(uint32_t));
5862        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5863        data += sizeof(uint32_t);
5864
5865        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5866                sizeof(uint32_t));
5867        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5868        data += sizeof(uint32_t);
5869
5870        metadata->tuning_params.tuning_mod3_data_size = 0;
5871        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5872                sizeof(uint32_t));
5873        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5874        data += sizeof(uint32_t);
5875
5876        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5877                TUNING_SENSOR_DATA_MAX);
5878        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5879                count);
5880        data += count;
5881
5882        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5883                TUNING_VFE_DATA_MAX);
5884        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5885                count);
5886        data += count;
5887
5888        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5889                TUNING_CPP_DATA_MAX);
5890        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5891                count);
5892        data += count;
5893
5894        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5895                TUNING_CAC_DATA_MAX);
5896        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5897                count);
5898        data += count;
5899
5900        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5901                (int32_t *)(void *)tuning_meta_data_blob,
5902                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5903    }
5904
5905    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5906            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5907        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5908                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5909                NEUTRAL_COL_POINTS);
5910    }
5911
5912    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5913        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5914        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5915    }
5916
5917    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5918        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5919        // Adjust crop region from sensor output coordinate system to active
5920        // array coordinate system.
5921        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5922                hAeRegions->rect.width, hAeRegions->rect.height);
5923
5924        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5925        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5926                REGIONS_TUPLE_COUNT);
5927        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5928                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5929                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5930                hAeRegions->rect.height);
5931    }
5932
5933    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5934        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5935        if (NAME_NOT_FOUND != val) {
5936            uint8_t fwkAfMode = (uint8_t)val;
5937            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5938            LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5939        } else {
5940            LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5941                    val);
5942        }
5943    }
5944
5945    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5946        uint8_t fwk_afState = (uint8_t) *afState;
5947        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5948        LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5949    }
5950
5951    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5952        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5953    }
5954
5955    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5956        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5957    }
5958
5959    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5960        uint8_t fwk_lensState = *lensState;
5961        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5962    }
5963
5964    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5965        /*af regions*/
5966        int32_t afRegions[REGIONS_TUPLE_COUNT];
5967        // Adjust crop region from sensor output coordinate system to active
5968        // array coordinate system.
5969        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5970                hAfRegions->rect.width, hAfRegions->rect.height);
5971
5972        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5973        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5974                REGIONS_TUPLE_COUNT);
5975        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5976                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5977                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5978                hAfRegions->rect.height);
5979    }
5980
5981    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5982        uint32_t ab_mode = *hal_ab_mode;
5983        if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
5984                ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
5985              ab_mode = CAM_ANTIBANDING_MODE_AUTO;
5986        }
5987        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5988                ab_mode);
5989        if (NAME_NOT_FOUND != val) {
5990            uint8_t fwk_ab_mode = (uint8_t)val;
5991            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5992        }
5993    }
5994
5995    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5996        int val = lookupFwkName(SCENE_MODES_MAP,
5997                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5998        if (NAME_NOT_FOUND != val) {
5999            uint8_t fwkBestshotMode = (uint8_t)val;
6000            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6001            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6002        } else {
6003            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6004        }
6005    }
6006
6007    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6008         uint8_t fwk_mode = (uint8_t) *mode;
6009         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6010    }
6011
6012    /* Constant metadata values to be update*/
6013    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6014    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6015
6016    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6017    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6018
6019    int32_t hotPixelMap[2];
6020    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6021
6022    // CDS
6023    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6024        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6025    }
6026
6027    // TNR
6028    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6029        uint8_t tnr_enable       = tnr->denoise_enable;
6030        int32_t tnr_process_type = (int32_t)tnr->process_plates;
6031
6032        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6033        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6034    }
6035
6036    // Reprocess crop data
6037    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6038        uint8_t cnt = crop_data->num_of_streams;
6039        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6040            // mm-qcamera-daemon only posts crop_data for streams
6041            // not linked to pproc. So no valid crop metadata is not
6042            // necessarily an error case.
6043            LOGD("No valid crop metadata entries");
6044        } else {
6045            uint32_t reproc_stream_id;
6046            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6047                LOGD("No reprocessible stream found, ignore crop data");
6048            } else {
6049                int rc = NO_ERROR;
6050                Vector<int32_t> roi_map;
6051                int32_t *crop = new int32_t[cnt*4];
6052                if (NULL == crop) {
6053                   rc = NO_MEMORY;
6054                }
6055                if (NO_ERROR == rc) {
6056                    int32_t streams_found = 0;
6057                    for (size_t i = 0; i < cnt; i++) {
6058                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6059                            if (pprocDone) {
6060                                // HAL already does internal reprocessing,
6061                                // either via reprocessing before JPEG encoding,
6062                                // or offline postprocessing for pproc bypass case.
6063                                crop[0] = 0;
6064                                crop[1] = 0;
6065                                crop[2] = mInputStreamInfo.dim.width;
6066                                crop[3] = mInputStreamInfo.dim.height;
6067                            } else {
6068                                crop[0] = crop_data->crop_info[i].crop.left;
6069                                crop[1] = crop_data->crop_info[i].crop.top;
6070                                crop[2] = crop_data->crop_info[i].crop.width;
6071                                crop[3] = crop_data->crop_info[i].crop.height;
6072                            }
6073                            roi_map.add(crop_data->crop_info[i].roi_map.left);
6074                            roi_map.add(crop_data->crop_info[i].roi_map.top);
6075                            roi_map.add(crop_data->crop_info[i].roi_map.width);
6076                            roi_map.add(crop_data->crop_info[i].roi_map.height);
6077                            streams_found++;
6078                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6079                                    crop[0], crop[1], crop[2], crop[3]);
6080                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6081                                    crop_data->crop_info[i].roi_map.left,
6082                                    crop_data->crop_info[i].roi_map.top,
6083                                    crop_data->crop_info[i].roi_map.width,
6084                                    crop_data->crop_info[i].roi_map.height);
6085                            break;
6086
6087                       }
6088                    }
6089                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6090                            &streams_found, 1);
6091                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
6092                            crop, (size_t)(streams_found * 4));
6093                    if (roi_map.array()) {
6094                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6095                                roi_map.array(), roi_map.size());
6096                    }
6097               }
6098               if (crop) {
6099                   delete [] crop;
6100               }
6101            }
6102        }
6103    }
6104
6105    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6106        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6107        // so hardcoding the CAC result to OFF mode.
6108        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6109        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6110    } else {
6111        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6112            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6113                    *cacMode);
6114            if (NAME_NOT_FOUND != val) {
6115                uint8_t resultCacMode = (uint8_t)val;
6116                // check whether CAC result from CB is equal to Framework set CAC mode
6117                // If not equal then set the CAC mode came in corresponding request
6118                if (fwk_cacMode != resultCacMode) {
6119                    resultCacMode = fwk_cacMode;
6120                }
6121                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6122                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6123            } else {
6124                LOGE("Invalid CAC camera parameter: %d", *cacMode);
6125            }
6126        }
6127    }
6128
6129    // Post blob of cam_cds_data through vendor tag.
6130    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6131        uint8_t cnt = cdsInfo->num_of_streams;
6132        cam_cds_data_t cdsDataOverride;
6133        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6134        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6135        cdsDataOverride.num_of_streams = 1;
6136        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6137            uint32_t reproc_stream_id;
6138            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6139                LOGD("No reprocessible stream found, ignore cds data");
6140            } else {
6141                for (size_t i = 0; i < cnt; i++) {
6142                    if (cdsInfo->cds_info[i].stream_id ==
6143                            reproc_stream_id) {
6144                        cdsDataOverride.cds_info[0].cds_enable =
6145                                cdsInfo->cds_info[i].cds_enable;
6146                        break;
6147                    }
6148                }
6149            }
6150        } else {
6151            LOGD("Invalid stream count %d in CDS_DATA", cnt);
6152        }
6153        camMetadata.update(QCAMERA3_CDS_INFO,
6154                (uint8_t *)&cdsDataOverride,
6155                sizeof(cam_cds_data_t));
6156    }
6157
6158    // Ldaf calibration data
6159    if (!mLdafCalibExist) {
6160        IF_META_AVAILABLE(uint32_t, ldafCalib,
6161                CAM_INTF_META_LDAF_EXIF, metadata) {
6162            mLdafCalibExist = true;
6163            mLdafCalib[0] = ldafCalib[0];
6164            mLdafCalib[1] = ldafCalib[1];
6165            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6166                    ldafCalib[0], ldafCalib[1]);
6167        }
6168    }
6169
6170    // AF scene change
6171    IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6172        camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6173    }
6174
6175    resultMetadata = camMetadata.release();
6176    return resultMetadata;
6177}
6178
6179/*===========================================================================
6180 * FUNCTION   : saveExifParams
6181 *
 * DESCRIPTION: Caches the per-frame EXIF debug parameters (AE, AWB, AF, ASD,
 *              stats, BE stats, bhist, 3A tuning) from the metadata callback
 *              into mExifParams, for later retrieval via get3AExifParams().
6183 *
6184 * PARAMETERS :
6185 *   @metadata : metadata information from callback
6186 *
6187 * RETURN     : none
6188 *
6189 *==========================================================================*/
6190void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6191{
6192    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6193            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6194        if (mExifParams.debug_params) {
6195            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6196            mExifParams.debug_params->ae_debug_params_valid = TRUE;
6197        }
6198    }
6199    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6200            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6201        if (mExifParams.debug_params) {
6202            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6203            mExifParams.debug_params->awb_debug_params_valid = TRUE;
6204        }
6205    }
6206    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6207            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6208        if (mExifParams.debug_params) {
6209            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6210            mExifParams.debug_params->af_debug_params_valid = TRUE;
6211        }
6212    }
6213    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6214            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6215        if (mExifParams.debug_params) {
6216            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6217            mExifParams.debug_params->asd_debug_params_valid = TRUE;
6218        }
6219    }
6220    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6221            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6222        if (mExifParams.debug_params) {
6223            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6224            mExifParams.debug_params->stats_debug_params_valid = TRUE;
6225        }
6226    }
6227    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6228            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6229        if (mExifParams.debug_params) {
6230            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6231            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6232        }
6233    }
6234    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6235            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6236        if (mExifParams.debug_params) {
6237            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6238            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6239        }
6240    }
6241    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6242            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6243        if (mExifParams.debug_params) {
6244            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6245            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6246        }
6247    }
6248}
6249
6250/*===========================================================================
6251 * FUNCTION   : get3AExifParams
6252 *
 * DESCRIPTION: Returns the cached EXIF/3A parameter set (mExifParams),
 *              previously populated by saveExifParams().
6254 *
6255 * PARAMETERS : none
6256 *
6257 *
6258 * RETURN     : mm_jpeg_exif_params_t
6259 *
6260 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Hand back a copy of the cached EXIF parameter set; the debug payloads
    // inside it are filled in incrementally by saveExifParams().
    return mExifParams;
}
6265
6266/*===========================================================================
6267 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
6268 *
 * DESCRIPTION: Translates urgent (partial-result) HAL metadata entries
 *              (AE/AF/AWB state and triggers) into a framework
 *              camera_metadata_t result buffer.
6270 *
6271 * PARAMETERS :
6272 *   @metadata : metadata information from callback
6273 *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
6274 *                               urgent metadata in a batch. Always true for
6275 *                               non-batch mode.
6276 *
6277 * RETURN     : camera_metadata_t*
6278 *              metadata in a format specified by fwk
6279 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB converged/searching state straight from the backend.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/locked/flash-required/precapture).
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // Map the HAL white-balance mode back to the framework enum; skip the
    // tag entirely if the value has no framework equivalent.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single backend counterpart; it is
    // deduced from three backend values. Precedence below: red-eye reduction
    // first, then flash mode, then the plain AE on/off/external-flash state.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
        fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // release() transfers ownership of the raw buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6383
6384/*===========================================================================
6385 * FUNCTION   : dumpMetadataToFile
6386 *
6387 * DESCRIPTION: Dumps tuning metadata to file system
6388 *
6389 * PARAMETERS :
6390 *   @meta           : tuning metadata
6391 *   @dumpFrameCount : current dump frame count
6392 *   @enabled        : Enable mask
6393 *
6394 *==========================================================================*/
6395void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6396                                                   uint32_t &dumpFrameCount,
6397                                                   bool enabled,
6398                                                   const char *type,
6399                                                   uint32_t frameNumber)
6400{
6401    //Some sanity checks
6402    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6403        LOGE("Tuning sensor data size bigger than expected %d: %d",
6404              meta.tuning_sensor_data_size,
6405              TUNING_SENSOR_DATA_MAX);
6406        return;
6407    }
6408
6409    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6410        LOGE("Tuning VFE data size bigger than expected %d: %d",
6411              meta.tuning_vfe_data_size,
6412              TUNING_VFE_DATA_MAX);
6413        return;
6414    }
6415
6416    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6417        LOGE("Tuning CPP data size bigger than expected %d: %d",
6418              meta.tuning_cpp_data_size,
6419              TUNING_CPP_DATA_MAX);
6420        return;
6421    }
6422
6423    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6424        LOGE("Tuning CAC data size bigger than expected %d: %d",
6425              meta.tuning_cac_data_size,
6426              TUNING_CAC_DATA_MAX);
6427        return;
6428    }
6429    //
6430
6431    if(enabled){
6432        char timeBuf[FILENAME_MAX];
6433        char buf[FILENAME_MAX];
6434        memset(buf, 0, sizeof(buf));
6435        memset(timeBuf, 0, sizeof(timeBuf));
6436        time_t current_time;
6437        struct tm * timeinfo;
6438        time (&current_time);
6439        timeinfo = localtime (&current_time);
6440        if (timeinfo != NULL) {
6441            /* Consistent naming for Jpeg+meta+raw: meta name */
6442            strftime (timeBuf, sizeof(timeBuf),
6443                    QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6444            /* Consistent naming for Jpeg+meta+raw: meta name end*/
6445        }
6446        String8 filePath(timeBuf);
6447         /* Consistent naming for Jpeg+meta+raw */
6448        snprintf(buf,
6449                sizeof(buf),
6450                "%dm_%s_%d.bin",
6451                dumpFrameCount,
6452                type,
6453                frameNumber);
6454         /* Consistent naming for Jpeg+meta+raw end */
6455        filePath.append(buf);
6456        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6457        if (file_fd >= 0) {
6458            ssize_t written_len = 0;
6459            meta.tuning_data_version = TUNING_DATA_VERSION;
6460            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6461            written_len += write(file_fd, data, sizeof(uint32_t));
6462            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6463            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6464            written_len += write(file_fd, data, sizeof(uint32_t));
6465            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6466            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6467            written_len += write(file_fd, data, sizeof(uint32_t));
6468            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6469            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6470            written_len += write(file_fd, data, sizeof(uint32_t));
6471            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6472            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6473            written_len += write(file_fd, data, sizeof(uint32_t));
6474            meta.tuning_mod3_data_size = 0;
6475            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6476            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6477            written_len += write(file_fd, data, sizeof(uint32_t));
6478            size_t total_size = meta.tuning_sensor_data_size;
6479            data = (void *)((uint8_t *)&meta.data);
6480            written_len += write(file_fd, data, total_size);
6481            total_size = meta.tuning_vfe_data_size;
6482            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6483            written_len += write(file_fd, data, total_size);
6484            total_size = meta.tuning_cpp_data_size;
6485            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6486            written_len += write(file_fd, data, total_size);
6487            total_size = meta.tuning_cac_data_size;
6488            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6489            written_len += write(file_fd, data, total_size);
6490            close(file_fd);
6491        }else {
6492            LOGE("fail to open file for metadata dumping");
6493        }
6494    }
6495}
6496
6497/*===========================================================================
6498 * FUNCTION   : cleanAndSortStreamInfo
6499 *
6500 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6501 *              and sort them such that raw stream is at the end of the list
6502 *              This is a workaround for camera daemon constraint.
6503 *
6504 * PARAMETERS : None
6505 *
6506 *==========================================================================*/
6507void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6508{
6509    List<stream_info_t *> newStreamInfo;
6510
6511    /*clean up invalid streams*/
6512    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6513            it != mStreamInfo.end();) {
6514        if(((*it)->status) == INVALID){
6515            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6516            delete channel;
6517            free(*it);
6518            it = mStreamInfo.erase(it);
6519        } else {
6520            it++;
6521        }
6522    }
6523
6524    // Move preview/video/callback/snapshot streams into newList
6525    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6526            it != mStreamInfo.end();) {
6527        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6528                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6529                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6530            newStreamInfo.push_back(*it);
6531            it = mStreamInfo.erase(it);
6532        } else
6533            it++;
6534    }
6535    // Move raw streams into newList
6536    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6537            it != mStreamInfo.end();) {
6538        newStreamInfo.push_back(*it);
6539        it = mStreamInfo.erase(it);
6540    }
6541
6542    mStreamInfo = newStreamInfo;
6543}
6544
6545/*===========================================================================
6546 * FUNCTION   : extractJpegMetadata
6547 *
6548 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6549 *              JPEG metadata is cached in HAL, and return as part of capture
6550 *              result when metadata is returned from camera daemon.
6551 *
6552 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6553 *              @request:      capture request
6554 *
6555 *==========================================================================*/
6556void QCamera3HardwareInterface::extractJpegMetadata(
6557        CameraMetadata& jpegMetadata,
6558        const camera3_capture_request_t *request)
6559{
6560    CameraMetadata frame_settings;
6561    frame_settings = request->settings;
6562
6563    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6564        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6565                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6566                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6567
6568    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6569        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6570                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6571                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6572
6573    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6574        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6575                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6576                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6577
6578    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6579        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6580                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6581                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6582
6583    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6584        jpegMetadata.update(ANDROID_JPEG_QUALITY,
6585                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6586                frame_settings.find(ANDROID_JPEG_QUALITY).count);
6587
6588    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6589        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6590                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6591                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6592
6593    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6594        int32_t thumbnail_size[2];
6595        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6596        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6597        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6598            int32_t orientation =
6599                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6600            if ((orientation == 90) || (orientation == 270)) {
6601               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6602               int32_t temp;
6603               temp = thumbnail_size[0];
6604               thumbnail_size[0] = thumbnail_size[1];
6605               thumbnail_size[1] = temp;
6606            }
6607         }
6608         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6609                thumbnail_size,
6610                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6611    }
6612
6613}
6614
6615/*===========================================================================
6616 * FUNCTION   : convertToRegions
6617 *
6618 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6619 *
6620 * PARAMETERS :
6621 *   @rect   : cam_rect_t struct to convert
6622 *   @region : int32_t destination array
6623 *   @weight : if we are converting from cam_area_t, weight is valid
6624 *             else weight = -1
6625 *
6626 *==========================================================================*/
6627void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6628        int32_t *region, int weight)
6629{
6630    region[0] = rect.left;
6631    region[1] = rect.top;
6632    region[2] = rect.left + rect.width;
6633    region[3] = rect.top + rect.height;
6634    if (weight > -1) {
6635        region[4] = weight;
6636    }
6637}
6638
6639/*===========================================================================
6640 * FUNCTION   : convertFromRegions
6641 *
6642 * DESCRIPTION: helper method to convert from array to cam_rect_t
6643 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill
 *   @settings : capture request settings to read the region from
 *   @tag      : metadata tag holding [x_min, y_min, x_max, y_max, weight]
6649 *
6650 *==========================================================================*/
6651void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6652        const camera_metadata_t *settings, uint32_t tag)
6653{
6654    CameraMetadata frame_settings;
6655    frame_settings = settings;
6656    int32_t x_min = frame_settings.find(tag).data.i32[0];
6657    int32_t y_min = frame_settings.find(tag).data.i32[1];
6658    int32_t x_max = frame_settings.find(tag).data.i32[2];
6659    int32_t y_max = frame_settings.find(tag).data.i32[3];
6660    roi.weight = frame_settings.find(tag).data.i32[4];
6661    roi.rect.left = x_min;
6662    roi.rect.top = y_min;
6663    roi.rect.width = x_max - x_min;
6664    roi.rect.height = y_max - y_min;
6665}
6666
6667/*===========================================================================
6668 * FUNCTION   : resetIfNeededROI
6669 *
6670 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6671 *              crop region
6672 *
6673 * PARAMETERS :
6674 *   @roi       : cam_area_t struct to resize
6675 *   @scalerCropRegion : cam_crop_region_t region to compare against
6676 *
6677 *
6678 *==========================================================================*/
bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
                                                 const cam_crop_region_t* scalerCropRegion)
{
    // Right/bottom edges of the ROI and of the crop region, in pixels.
    int32_t roi_x_max = roi->rect.width + roi->rect.left;
    int32_t roi_y_max = roi->rect.height + roi->rect.top;
    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;

    /* According to spec weight = 0 is used to indicate roi needs to be disabled
     * without having this check the calculations below to validate if the roi
     * is inside scalar crop region will fail resulting in the roi not being
     * reset causing algorithm to continue to use stale roi window
     */
    if (roi->weight == 0) {
        return true;
    }

    // Reject (return false) any ROI that lies entirely outside the crop
    // region; the caller treats false as "invalid ROI".
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scalar crop's left edge
        (roi_y_max < scalerCropRegion->top)  ||
        // bottom edge of roi window is above scalar crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond(right) of scalar crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scalar crop's bottom edge
        return false;
    }
    // Partially overlapping ROI: clamp each edge into the crop region.
    if (roi->rect.left < scalerCropRegion->left) {
        roi->rect.left = scalerCropRegion->left;
    }
    if (roi->rect.top < scalerCropRegion->top) {
        roi->rect.top = scalerCropRegion->top;
    }
    if (roi_x_max > crop_x_max) {
        roi_x_max = crop_x_max;
    }
    if (roi_y_max > crop_y_max) {
        roi_y_max = crop_y_max;
    }
    // Recompute width/height from the (possibly clamped) edges.
    roi->rect.width = roi_x_max - roi->rect.left;
    roi->rect.height = roi_y_max - roi->rect.top;
    return true;
}
6722
6723/*===========================================================================
6724 * FUNCTION   : convertLandmarks
6725 *
6726 * DESCRIPTION: helper method to extract the landmarks from face detection info
6727 *
6728 * PARAMETERS :
6729 *   @landmark_data : input landmark data to be converted
6730 *   @landmarks : int32_t destination array
6731 *
6732 *
6733 *==========================================================================*/
6734void QCamera3HardwareInterface::convertLandmarks(
6735        cam_face_landmarks_info_t landmark_data,
6736        int32_t *landmarks)
6737{
6738    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6739    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6740    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6741    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6742    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6743    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6744}
6745
6746#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6747/*===========================================================================
6748 * FUNCTION   : initCapabilities
6749 *
6750 * DESCRIPTION: initialize camera capabilities in static data struct
6751 *
6752 * PARAMETERS :
6753 *   @cameraId  : camera Id
6754 *
6755 * RETURN     : int32_t type of status
6756 *              NO_ERROR  -- success
6757 *              none-zero failure code
6758 *==========================================================================*/
6759int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6760{
6761    int rc = 0;
6762    mm_camera_vtbl_t *cameraHandle = NULL;
6763    QCamera3HeapMemory *capabilityHeap = NULL;
6764
6765    rc = camera_open((uint8_t)cameraId, &cameraHandle);
6766    if (rc) {
6767        LOGE("camera_open failed. rc = %d", rc);
6768        goto open_failed;
6769    }
6770    if (!cameraHandle) {
6771        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6772        goto open_failed;
6773    }
6774
6775    capabilityHeap = new QCamera3HeapMemory(1);
6776    if (capabilityHeap == NULL) {
6777        LOGE("creation of capabilityHeap failed");
6778        goto heap_creation_failed;
6779    }
6780    /* Allocate memory for capability buffer */
6781    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6782    if(rc != OK) {
6783        LOGE("No memory for cappability");
6784        goto allocate_failed;
6785    }
6786
6787    /* Map memory for capability buffer */
6788    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6789    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6790                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
6791                                capabilityHeap->getFd(0),
6792                                sizeof(cam_capability_t),
6793                                capabilityHeap->getPtr(0));
6794    if(rc < 0) {
6795        LOGE("failed to map capability buffer");
6796        goto map_failed;
6797    }
6798
6799    /* Query Capability */
6800    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6801    if(rc < 0) {
6802        LOGE("failed to query capability");
6803        goto query_failed;
6804    }
6805    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6806    if (!gCamCapability[cameraId]) {
6807        LOGE("out of memory");
6808        goto query_failed;
6809    }
6810    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6811                                        sizeof(cam_capability_t));
6812
6813    int index;
6814    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6815        cam_analysis_info_t *p_analysis_info =
6816                &gCamCapability[cameraId]->analysis_info[index];
6817        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6818        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6819    }
6820    rc = 0;
6821
6822query_failed:
6823    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6824                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
6825map_failed:
6826    capabilityHeap->deallocate();
6827allocate_failed:
6828    delete capabilityHeap;
6829heap_creation_failed:
6830    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6831    cameraHandle = NULL;
6832open_failed:
6833    return rc;
6834}
6835
6836/*==========================================================================
6837 * FUNCTION   : get3Aversion
6838 *
6839 * DESCRIPTION: get the Q3A S/W version
6840 *
6841 * PARAMETERS :
6842 *  @sw_version: Reference of Q3A structure which will hold version info upon
6843 *               return
6844 *
6845 * RETURN     : None
6846 *
6847 *==========================================================================*/
6848void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6849{
6850    if(gCamCapability[mCameraId])
6851        sw_version = gCamCapability[mCameraId]->q3a_version;
6852    else
6853        LOGE("Capability structure NULL!");
6854}
6855
6856
6857/*===========================================================================
6858 * FUNCTION   : initParameters
6859 *
6860 * DESCRIPTION: initialize camera parameters
6861 *
6862 * PARAMETERS :
6863 *
6864 * RETURN     : int32_t type of status
6865 *              NO_ERROR  -- success
6866 *              none-zero failure code
6867 *==========================================================================*/
6868int QCamera3HardwareInterface::initParameters()
6869{
6870    int rc = 0;
6871
6872    //Allocate Set Param Buffer
6873    mParamHeap = new QCamera3HeapMemory(1);
6874    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6875    if(rc != OK) {
6876        rc = NO_MEMORY;
6877        LOGE("Failed to allocate SETPARM Heap memory");
6878        delete mParamHeap;
6879        mParamHeap = NULL;
6880        return rc;
6881    }
6882
6883    //Map memory for parameters buffer
6884    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6885            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6886            mParamHeap->getFd(0),
6887            sizeof(metadata_buffer_t),
6888            (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6889    if(rc < 0) {
6890        LOGE("failed to map SETPARM buffer");
6891        rc = FAILED_TRANSACTION;
6892        mParamHeap->deallocate();
6893        delete mParamHeap;
6894        mParamHeap = NULL;
6895        return rc;
6896    }
6897
6898    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6899
6900    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6901    return rc;
6902}
6903
6904/*===========================================================================
6905 * FUNCTION   : deinitParameters
6906 *
6907 * DESCRIPTION: de-initialize camera parameters
6908 *
6909 * PARAMETERS :
6910 *
6911 * RETURN     : NONE
6912 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the backend before releasing the
    // heap that backs it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage, which is gone now.
    mParameters = NULL;

    free(mPrevParameters);
    mPrevParameters = NULL;
}
6927
6928/*===========================================================================
6929 * FUNCTION   : calcMaxJpegSize
6930 *
6931 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6932 *
6933 * PARAMETERS :
6934 *
6935 * RETURN     : max_jpeg_size
6936 *==========================================================================*/
6937size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6938{
6939    size_t max_jpeg_size = 0;
6940    size_t temp_width, temp_height;
6941    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6942            MAX_SIZES_CNT);
6943    for (size_t i = 0; i < count; i++) {
6944        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6945        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6946        if (temp_width * temp_height > max_jpeg_size ) {
6947            max_jpeg_size = temp_width * temp_height;
6948        }
6949    }
6950    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6951    return max_jpeg_size;
6952}
6953
6954/*===========================================================================
6955 * FUNCTION   : getMaxRawSize
6956 *
6957 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6958 *
6959 * PARAMETERS :
6960 *
6961 * RETURN     : Largest supported Raw Dimension
6962 *==========================================================================*/
6963cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6964{
6965    int max_width = 0;
6966    cam_dimension_t maxRawSize;
6967
6968    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6969    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6970        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6971            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6972            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6973        }
6974    }
6975    return maxRawSize;
6976}
6977
6978
6979/*===========================================================================
6980 * FUNCTION   : calcMaxJpegDim
6981 *
6982 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6983 *
6984 * PARAMETERS :
6985 *
6986 * RETURN     : max_jpeg_dim
6987 *==========================================================================*/
6988cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6989{
6990    cam_dimension_t max_jpeg_dim;
6991    cam_dimension_t curr_jpeg_dim;
6992    max_jpeg_dim.width = 0;
6993    max_jpeg_dim.height = 0;
6994    curr_jpeg_dim.width = 0;
6995    curr_jpeg_dim.height = 0;
6996    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6997        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6998        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6999        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7000            max_jpeg_dim.width * max_jpeg_dim.height ) {
7001            max_jpeg_dim.width = curr_jpeg_dim.width;
7002            max_jpeg_dim.height = curr_jpeg_dim.height;
7003        }
7004    }
7005    return max_jpeg_dim;
7006}
7007
7008/*===========================================================================
7009 * FUNCTION   : addStreamConfig
7010 *
7011 * DESCRIPTION: adds the stream configuration to the array
7012 *
7013 * PARAMETERS :
7014 * @available_stream_configs : pointer to stream configuration array
7015 * @scalar_format            : scalar format
7016 * @dim                      : configuration dimension
7017 * @config_type              : input or output configuration type
7018 *
7019 * RETURN     : NONE
7020 *==========================================================================*/
7021void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7022        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7023{
7024    available_stream_configs.add(scalar_format);
7025    available_stream_configs.add(dim.width);
7026    available_stream_configs.add(dim.height);
7027    available_stream_configs.add(config_type);
7028}
7029
7030/*===========================================================================
 * FUNCTION   : supportBurstCapture
7032 *
7033 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7034 *
7035 * PARAMETERS :
7036 *   @cameraId  : camera Id
7037 *
7038 * RETURN     : true if camera supports BURST_CAPTURE
7039 *              false otherwise
7040 *==========================================================================*/
7041bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7042{
7043    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7044    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7045    const int32_t highResWidth = 3264;
7046    const int32_t highResHeight = 2448;
7047
7048    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7049        // Maximum resolution images cannot be captured at >= 10fps
7050        // -> not supporting BURST_CAPTURE
7051        return false;
7052    }
7053
7054    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7055        // Maximum resolution images can be captured at >= 20fps
7056        // --> supporting BURST_CAPTURE
7057        return true;
7058    }
7059
7060    // Find the smallest highRes resolution, or largest resolution if there is none
7061    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7062            MAX_SIZES_CNT);
7063    size_t highRes = 0;
7064    while ((highRes + 1 < totalCnt) &&
7065            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7066            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7067            highResWidth * highResHeight)) {
7068        highRes++;
7069    }
7070    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7071        return true;
7072    } else {
7073        return false;
7074    }
7075}
7076
7077/*===========================================================================
7078 * FUNCTION   : initStaticMetadata
7079 *
7080 * DESCRIPTION: initialize the static metadata
7081 *
7082 * PARAMETERS :
7083 *   @cameraId  : camera Id
7084 *
7085 * RETURN     : int32_t type of status
7086 *              0  -- success
7087 *              non-zero failure code
7088 *==========================================================================*/
7089int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7090{
7091    int rc = 0;
7092    CameraMetadata staticInfo;
7093    size_t count = 0;
7094    bool limitedDevice = false;
7095    char prop[PROPERTY_VALUE_MAX];
7096    bool supportBurst = false;
7097
7098    supportBurst = supportBurstCapture(cameraId);
7099
7100    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7101     * guaranteed or if min fps of max resolution is less than 20 fps, its
7102     * advertised as limited device*/
7103    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7104            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7105            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7106            !supportBurst;
7107
7108    uint8_t supportedHwLvl = limitedDevice ?
7109            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
7110            // LEVEL_3 - This device will support level 3.
7111            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7112
7113    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7114            &supportedHwLvl, 1);
7115
7116    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
7117    /*HAL 3 only*/
7118    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7119                    &gCamCapability[cameraId]->min_focus_distance, 1);
7120
7121    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7122                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
7123
7124    /*should be using focal lengths but sensor doesn't provide that info now*/
7125    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7126                      &gCamCapability[cameraId]->focal_length,
7127                      1);
7128
7129    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7130            gCamCapability[cameraId]->apertures,
7131            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7132
7133    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7134            gCamCapability[cameraId]->filter_densities,
7135            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7136
7137
7138    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7139            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7140            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7141
7142    int32_t lens_shading_map_size[] = {
7143            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7144            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7145    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7146                      lens_shading_map_size,
7147                      sizeof(lens_shading_map_size)/sizeof(int32_t));
7148
7149    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7150            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7151
7152    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7153            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7154
7155    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7156            &gCamCapability[cameraId]->max_frame_duration, 1);
7157
7158    camera_metadata_rational baseGainFactor = {
7159            gCamCapability[cameraId]->base_gain_factor.numerator,
7160            gCamCapability[cameraId]->base_gain_factor.denominator};
7161    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7162                      &baseGainFactor, 1);
7163
7164    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7165                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7166
7167    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7168            gCamCapability[cameraId]->pixel_array_size.height};
7169    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7170                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7171
7172    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7173            gCamCapability[cameraId]->active_array_size.top,
7174            gCamCapability[cameraId]->active_array_size.width,
7175            gCamCapability[cameraId]->active_array_size.height};
7176    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7177            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7178
7179    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7180            &gCamCapability[cameraId]->white_level, 1);
7181
7182    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
7183    adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
7184            gCamCapability[cameraId]->color_arrangement);
7185    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7186            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
7187
7188    bool hasBlackRegions = false;
7189    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7190        LOGW("black_region_count: %d is bounded to %d",
7191            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7192        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7193    }
7194    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7195        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7196        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7197            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7198        }
7199        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7200                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7201        hasBlackRegions = true;
7202    }
7203
7204    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7205            &gCamCapability[cameraId]->flash_charge_duration, 1);
7206
7207    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7208            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7209
7210    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
7211            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
7212            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
7213    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7214            &timestampSource, 1);
7215
7216    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7217            &gCamCapability[cameraId]->histogram_size, 1);
7218
7219    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7220            &gCamCapability[cameraId]->max_histogram_count, 1);
7221
7222    int32_t sharpness_map_size[] = {
7223            gCamCapability[cameraId]->sharpness_map_size.width,
7224            gCamCapability[cameraId]->sharpness_map_size.height};
7225
7226    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7227            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7228
7229    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7230            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7231
7232    int32_t scalar_formats[] = {
7233            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7234            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7235            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7236            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7237            HAL_PIXEL_FORMAT_RAW10,
7238            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7239    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7240    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7241                      scalar_formats,
7242                      scalar_formats_count);
7243
7244    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7245    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7246    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7247            count, MAX_SIZES_CNT, available_processed_sizes);
7248    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7249            available_processed_sizes, count * 2);
7250
7251    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7252    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7253    makeTable(gCamCapability[cameraId]->raw_dim,
7254            count, MAX_SIZES_CNT, available_raw_sizes);
7255    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7256            available_raw_sizes, count * 2);
7257
7258    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7259    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7260    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7261            count, MAX_SIZES_CNT, available_fps_ranges);
7262    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7263            available_fps_ranges, count * 2);
7264
7265    camera_metadata_rational exposureCompensationStep = {
7266            gCamCapability[cameraId]->exp_compensation_step.numerator,
7267            gCamCapability[cameraId]->exp_compensation_step.denominator};
7268    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7269                      &exposureCompensationStep, 1);
7270
7271    Vector<uint8_t> availableVstabModes;
7272    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7273    char eis_prop[PROPERTY_VALUE_MAX];
7274    memset(eis_prop, 0, sizeof(eis_prop));
7275    property_get("persist.camera.eis.enable", eis_prop, "0");
7276    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7277    if (facingBack && eis_prop_set) {
7278        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7279    }
7280    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7281                      availableVstabModes.array(), availableVstabModes.size());
7282
7283    /*HAL 1 and HAL 3 common*/
7284    float maxZoom = 4;
7285    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7286            &maxZoom, 1);
7287
7288    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7289    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7290
7291    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7292    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7293        max3aRegions[2] = 0; /* AF not supported */
7294    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7295            max3aRegions, 3);
7296
7297    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7298    memset(prop, 0, sizeof(prop));
7299    property_get("persist.camera.facedetect", prop, "1");
7300    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7301    LOGD("Support face detection mode: %d",
7302             supportedFaceDetectMode);
7303
7304    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
7305    Vector<uint8_t> availableFaceDetectModes;
7306    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7307    if (supportedFaceDetectMode == 1) {
7308        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7309    } else if (supportedFaceDetectMode == 2) {
7310        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7311    } else if (supportedFaceDetectMode == 3) {
7312        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7313        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7314    } else {
7315        maxFaces = 0;
7316    }
7317    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7318            availableFaceDetectModes.array(),
7319            availableFaceDetectModes.size());
7320    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7321            (int32_t *)&maxFaces, 1);
7322
7323    int32_t exposureCompensationRange[] = {
7324            gCamCapability[cameraId]->exposure_compensation_min,
7325            gCamCapability[cameraId]->exposure_compensation_max};
7326    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7327            exposureCompensationRange,
7328            sizeof(exposureCompensationRange)/sizeof(int32_t));
7329
7330    uint8_t lensFacing = (facingBack) ?
7331            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7332    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7333
7334    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7335                      available_thumbnail_sizes,
7336                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7337
7338    /*all sizes will be clubbed into this tag*/
7339    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7340    /*android.scaler.availableStreamConfigurations*/
7341    Vector<int32_t> available_stream_configs;
7342    cam_dimension_t active_array_dim;
7343    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
7344    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
7345    /* Add input/output stream configurations for each scalar formats*/
7346    for (size_t j = 0; j < scalar_formats_count; j++) {
7347        switch (scalar_formats[j]) {
7348        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7349        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7350        case HAL_PIXEL_FORMAT_RAW10:
7351            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7352                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7353                addStreamConfig(available_stream_configs, scalar_formats[j],
7354                        gCamCapability[cameraId]->raw_dim[i],
7355                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7356            }
7357            break;
7358        case HAL_PIXEL_FORMAT_BLOB:
7359            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7360                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7361                addStreamConfig(available_stream_configs, scalar_formats[j],
7362                        gCamCapability[cameraId]->picture_sizes_tbl[i],
7363                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7364            }
7365            break;
7366        case HAL_PIXEL_FORMAT_YCbCr_420_888:
7367        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7368        default:
7369            cam_dimension_t largest_picture_size;
7370            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7371            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7372                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7373                addStreamConfig(available_stream_configs, scalar_formats[j],
7374                        gCamCapability[cameraId]->picture_sizes_tbl[i],
7375                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7376                /* Book keep largest */
7377                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7378                        >= largest_picture_size.width &&
7379                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
7380                        >= largest_picture_size.height)
7381                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7382            }
7383            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
7384            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7385                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7386                 addStreamConfig(available_stream_configs, scalar_formats[j],
7387                         largest_picture_size,
7388                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7389            }
7390            break;
7391        }
7392    }
7393
7394    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7395                      available_stream_configs.array(), available_stream_configs.size());
7396    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7397    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7398
7399    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7400    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7401
7402    /* android.scaler.availableMinFrameDurations */
7403    Vector<int64_t> available_min_durations;
7404    for (size_t j = 0; j < scalar_formats_count; j++) {
7405        switch (scalar_formats[j]) {
7406        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7407        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7408        case HAL_PIXEL_FORMAT_RAW10:
7409            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7410                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7411                available_min_durations.add(scalar_formats[j]);
7412                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7413                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7414                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7415            }
7416            break;
7417        default:
7418            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7419                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7420                available_min_durations.add(scalar_formats[j]);
7421                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7422                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7423                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7424            }
7425            break;
7426        }
7427    }
7428    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7429                      available_min_durations.array(), available_min_durations.size());
7430
7431    Vector<int32_t> available_hfr_configs;
7432    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7433        int32_t fps = 0;
7434        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7435        case CAM_HFR_MODE_60FPS:
7436            fps = 60;
7437            break;
7438        case CAM_HFR_MODE_90FPS:
7439            fps = 90;
7440            break;
7441        case CAM_HFR_MODE_120FPS:
7442            fps = 120;
7443            break;
7444        case CAM_HFR_MODE_150FPS:
7445            fps = 150;
7446            break;
7447        case CAM_HFR_MODE_180FPS:
7448            fps = 180;
7449            break;
7450        case CAM_HFR_MODE_210FPS:
7451            fps = 210;
7452            break;
7453        case CAM_HFR_MODE_240FPS:
7454            fps = 240;
7455            break;
7456        case CAM_HFR_MODE_480FPS:
7457            fps = 480;
7458            break;
7459        case CAM_HFR_MODE_OFF:
7460        case CAM_HFR_MODE_MAX:
7461        default:
7462            break;
7463        }
7464
7465        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7466        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7467            /* For each HFR frame rate, need to advertise one variable fps range
7468             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7469             * and [120, 120]. While camcorder preview alone is running [30, 120] is
7470             * set by the app. When video recording is started, [120, 120] is
7471             * set. This way sensor configuration does not change when recording
7472             * is started */
7473
7474            /* (width, height, fps_min, fps_max, batch_size_max) */
7475            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7476                j < MAX_SIZES_CNT; j++) {
7477                available_hfr_configs.add(
7478                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7479                available_hfr_configs.add(
7480                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7481                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7482                available_hfr_configs.add(fps);
7483                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7484
7485                /* (width, height, fps_min, fps_max, batch_size_max) */
7486                available_hfr_configs.add(
7487                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7488                available_hfr_configs.add(
7489                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7490                available_hfr_configs.add(fps);
7491                available_hfr_configs.add(fps);
7492                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7493            }
7494       }
7495    }
7496    //Advertise HFR capability only if the property is set
7497    memset(prop, 0, sizeof(prop));
7498    property_get("persist.camera.hal3hfr.enable", prop, "1");
7499    uint8_t hfrEnable = (uint8_t)atoi(prop);
7500
7501    if(hfrEnable && available_hfr_configs.array()) {
7502        staticInfo.update(
7503                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
7504                available_hfr_configs.array(), available_hfr_configs.size());
7505    }
7506
7507    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
7508    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
7509                      &max_jpeg_size, 1);
7510
7511    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
7512    size_t size = 0;
7513    count = CAM_EFFECT_MODE_MAX;
7514    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
7515    for (size_t i = 0; i < count; i++) {
7516        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7517                gCamCapability[cameraId]->supported_effects[i]);
7518        if (NAME_NOT_FOUND != val) {
7519            avail_effects[size] = (uint8_t)val;
7520            size++;
7521        }
7522    }
7523    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
7524                      avail_effects,
7525                      size);
7526
7527    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
7528    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
7529    size_t supported_scene_modes_cnt = 0;
7530    count = CAM_SCENE_MODE_MAX;
7531    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
7532    for (size_t i = 0; i < count; i++) {
7533        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
7534                CAM_SCENE_MODE_OFF) {
7535            int val = lookupFwkName(SCENE_MODES_MAP,
7536                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
7537                    gCamCapability[cameraId]->supported_scene_modes[i]);
7538            if (NAME_NOT_FOUND != val) {
7539                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
7540                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
7541                supported_scene_modes_cnt++;
7542            }
7543        }
7544    }
7545    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7546                      avail_scene_modes,
7547                      supported_scene_modes_cnt);
7548
7549    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
7550    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
7551                      supported_scene_modes_cnt,
7552                      CAM_SCENE_MODE_MAX,
7553                      scene_mode_overrides,
7554                      supported_indexes,
7555                      cameraId);
7556
7557    if (supported_scene_modes_cnt == 0) {
7558        supported_scene_modes_cnt = 1;
7559        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
7560    }
7561
7562    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
7563            scene_mode_overrides, supported_scene_modes_cnt * 3);
7564
7565    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
7566                                         ANDROID_CONTROL_MODE_AUTO,
7567                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
7568    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
7569            available_control_modes,
7570            3);
7571
7572    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
7573    size = 0;
7574    count = CAM_ANTIBANDING_MODE_MAX;
7575    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
7576    for (size_t i = 0; i < count; i++) {
7577        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7578                gCamCapability[cameraId]->supported_antibandings[i]);
7579        if (NAME_NOT_FOUND != val) {
7580            avail_antibanding_modes[size] = (uint8_t)val;
7581            size++;
7582        }
7583
7584    }
7585    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7586                      avail_antibanding_modes,
7587                      size);
7588
7589    uint8_t avail_abberation_modes[] = {
7590            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
7591            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
7592            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
7593    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
7594    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
7595    if (0 == count) {
7596        //  If no aberration correction modes are available for a device, this advertise OFF mode
7597        size = 1;
7598    } else {
7599        // If count is not zero then atleast one among the FAST or HIGH quality is supported
7600        // So, advertize all 3 modes if atleast any one mode is supported as per the
7601        // new M requirement
7602        size = 3;
7603    }
7604    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7605            avail_abberation_modes,
7606            size);
7607
7608    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
7609    size = 0;
7610    count = CAM_FOCUS_MODE_MAX;
7611    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
7612    for (size_t i = 0; i < count; i++) {
7613        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7614                gCamCapability[cameraId]->supported_focus_modes[i]);
7615        if (NAME_NOT_FOUND != val) {
7616            avail_af_modes[size] = (uint8_t)val;
7617            size++;
7618        }
7619    }
7620    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
7621                      avail_af_modes,
7622                      size);
7623
7624    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
7625    size = 0;
7626    count = CAM_WB_MODE_MAX;
7627    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
7628    for (size_t i = 0; i < count; i++) {
7629        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7630                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7631                gCamCapability[cameraId]->supported_white_balances[i]);
7632        if (NAME_NOT_FOUND != val) {
7633            avail_awb_modes[size] = (uint8_t)val;
7634            size++;
7635        }
7636    }
7637    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
7638                      avail_awb_modes,
7639                      size);
7640
7641    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7642    count = CAM_FLASH_FIRING_LEVEL_MAX;
7643    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7644            count);
7645    for (size_t i = 0; i < count; i++) {
7646        available_flash_levels[i] =
7647                gCamCapability[cameraId]->supported_firing_levels[i];
7648    }
7649    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7650            available_flash_levels, count);
7651
7652    uint8_t flashAvailable;
7653    if (gCamCapability[cameraId]->flash_available)
7654        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7655    else
7656        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7657    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7658            &flashAvailable, 1);
7659
7660    Vector<uint8_t> avail_ae_modes;
7661    count = CAM_AE_MODE_MAX;
7662    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
7663    for (size_t i = 0; i < count; i++) {
7664        uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
7665        if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
7666            aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
7667        }
7668        avail_ae_modes.add(aeMode);
7669    }
7670    if (flashAvailable) {
7671        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
7672        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
7673    }
7674    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
7675                      avail_ae_modes.array(),
7676                      avail_ae_modes.size());
7677
7678    int32_t sensitivity_range[2];
7679    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
7680    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
7681    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
7682                      sensitivity_range,
7683                      sizeof(sensitivity_range) / sizeof(int32_t));
7684
7685    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7686                      &gCamCapability[cameraId]->max_analog_sensitivity,
7687                      1);
7688
7689    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
7690    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
7691                      &sensor_orientation,
7692                      1);
7693
7694    int32_t max_output_streams[] = {
7695            MAX_STALLING_STREAMS,
7696            MAX_PROCESSED_STREAMS,
7697            MAX_RAW_STREAMS};
7698    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
7699            max_output_streams,
7700            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
7701
7702    uint8_t avail_leds = 0;
7703    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
7704                      &avail_leds, 0);
7705
7706    uint8_t focus_dist_calibrated;
7707    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
7708            gCamCapability[cameraId]->focus_dist_calibrated);
7709    if (NAME_NOT_FOUND != val) {
7710        focus_dist_calibrated = (uint8_t)val;
7711        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7712                     &focus_dist_calibrated, 1);
7713    }
7714
7715    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7716    size = 0;
7717    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7718            MAX_TEST_PATTERN_CNT);
7719    for (size_t i = 0; i < count; i++) {
7720        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7721                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7722        if (NAME_NOT_FOUND != testpatternMode) {
7723            avail_testpattern_modes[size] = testpatternMode;
7724            size++;
7725        }
7726    }
7727    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7728                      avail_testpattern_modes,
7729                      size);
7730
7731    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
7732    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
7733                      &max_pipeline_depth,
7734                      1);
7735
7736    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
7737    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7738                      &partial_result_count,
7739                       1);
7740
7741    int32_t max_stall_duration = MAX_REPROCESS_STALL;
7742    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7743
7744    Vector<uint8_t> available_capabilities;
7745    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
7746    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
7747    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
7748    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
7749    if (supportBurst) {
7750        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
7751    }
7752    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
7753    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
7754    if (hfrEnable && available_hfr_configs.array()) {
7755        available_capabilities.add(
7756                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
7757    }
7758
7759    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
7760        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
7761    }
7762    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7763            available_capabilities.array(),
7764            available_capabilities.size());
7765
7766    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
7767    //Assumption is that all bayer cameras support MANUAL_SENSOR.
7768    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7769            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7770
7771    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7772            &aeLockAvailable, 1);
7773
7774    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
7775    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7776    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7777            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7778
7779    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7780            &awbLockAvailable, 1);
7781
7782    int32_t max_input_streams = 1;
7783    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7784                      &max_input_streams,
7785                      1);
7786
7787    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
7788    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
7789            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
7790            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
7791            HAL_PIXEL_FORMAT_YCbCr_420_888};
7792    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7793                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
7794
7795    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
7796    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
7797                      &max_latency,
7798                      1);
7799
7800    int32_t isp_sensitivity_range[2];
7801    isp_sensitivity_range[0] =
7802        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
7803    isp_sensitivity_range[1] =
7804        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
7805    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7806                      isp_sensitivity_range,
7807                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7808
7809    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7810                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7811    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7812            available_hot_pixel_modes,
7813            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7814
7815    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7816                                         ANDROID_SHADING_MODE_FAST,
7817                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
7818    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7819                      available_shading_modes,
7820                      3);
7821
7822    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7823                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7824    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7825                      available_lens_shading_map_modes,
7826                      2);
7827
7828    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
7829                                      ANDROID_EDGE_MODE_FAST,
7830                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
7831                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
7832    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7833            available_edge_modes,
7834            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
7835
7836    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
7837                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
7838                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
7839                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
7840                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
7841    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7842            available_noise_red_modes,
7843            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
7844
7845    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
7846                                         ANDROID_TONEMAP_MODE_FAST,
7847                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7848    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7849            available_tonemap_modes,
7850            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7851
7852    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7853    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7854            available_hot_pixel_map_modes,
7855            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7856
7857    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7858            gCamCapability[cameraId]->reference_illuminant1);
7859    if (NAME_NOT_FOUND != val) {
7860        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7861        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7862    }
7863
7864    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7865            gCamCapability[cameraId]->reference_illuminant2);
7866    if (NAME_NOT_FOUND != val) {
7867        uint8_t fwkReferenceIlluminant = (uint8_t)val;
7868        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7869    }
7870
7871    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7872            (void *)gCamCapability[cameraId]->forward_matrix1,
7873            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7874
7875    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7876            (void *)gCamCapability[cameraId]->forward_matrix2,
7877            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7878
7879    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7880            (void *)gCamCapability[cameraId]->color_transform1,
7881            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7882
7883    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7884            (void *)gCamCapability[cameraId]->color_transform2,
7885            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7886
7887    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7888            (void *)gCamCapability[cameraId]->calibration_transform1,
7889            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7890
7891    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7892            (void *)gCamCapability[cameraId]->calibration_transform2,
7893            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7894
7895    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7896       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7897       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7898       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7899       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7900       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7901       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7902       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7903       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7904       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7905       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7906       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7907       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7908       ANDROID_JPEG_GPS_COORDINATES,
7909       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7910       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7911       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7912       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7913       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7914       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7915       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7916       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7917       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7918       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
7919       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7920       ANDROID_STATISTICS_FACE_DETECT_MODE,
7921       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7922       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7923       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7924       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7925       /* DevCamDebug metadata request_keys_basic */
7926       DEVCAMDEBUG_META_ENABLE,
7927       /* DevCamDebug metadata end */
7928       };
7929
7930    size_t request_keys_cnt =
7931            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7932    Vector<int32_t> available_request_keys;
7933    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7934    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7935        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7936    }
7937
7938    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7939            available_request_keys.array(), available_request_keys.size());
7940
7941    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7942       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7943       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7944       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7945       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7946       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7947       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7948       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7949       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7950       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7951       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7952       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7953       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7954       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7955       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7956       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7957       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7958       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7959       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7960       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7961       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7962       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
7963       ANDROID_STATISTICS_FACE_SCORES,
7964       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
7965       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
7966       // DevCamDebug metadata result_keys_basic
7967       DEVCAMDEBUG_META_ENABLE,
7968       // DevCamDebug metadata result_keys AF
7969       DEVCAMDEBUG_AF_LENS_POSITION,
7970       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
7971       DEVCAMDEBUG_AF_TOF_DISTANCE,
7972       DEVCAMDEBUG_AF_LUMA,
7973       DEVCAMDEBUG_AF_HAF_STATE,
7974       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
7975       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
7976       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
7977       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
7978       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
7979       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
7980       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
7981       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
7982       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
7983       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
7984       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
7985       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
7986       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
7987       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
7988       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
7989       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
7990       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
7991       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
7992       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
7993       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
7994       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
7995       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
7996       // DevCamDebug metadata result_keys AEC
7997       DEVCAMDEBUG_AEC_TARGET_LUMA,
7998       DEVCAMDEBUG_AEC_COMP_LUMA,
7999       DEVCAMDEBUG_AEC_AVG_LUMA,
8000       DEVCAMDEBUG_AEC_CUR_LUMA,
8001       DEVCAMDEBUG_AEC_LINECOUNT,
8002       DEVCAMDEBUG_AEC_REAL_GAIN,
8003       DEVCAMDEBUG_AEC_EXP_INDEX,
8004       DEVCAMDEBUG_AEC_LUX_IDX,
8005       // DevCamDebug metadata result_keys AWB
8006       DEVCAMDEBUG_AWB_R_GAIN,
8007       DEVCAMDEBUG_AWB_G_GAIN,
8008       DEVCAMDEBUG_AWB_B_GAIN,
8009       DEVCAMDEBUG_AWB_CCT,
8010       DEVCAMDEBUG_AWB_DECISION,
8011       /* DevCamDebug metadata end */
8012       };
8013    size_t result_keys_cnt =
8014            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8015
8016    Vector<int32_t> available_result_keys;
8017    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8018    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8019        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8020    }
8021    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8022        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8023        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8024    }
8025    if (supportedFaceDetectMode == 1) {
8026        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8027        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8028    } else if ((supportedFaceDetectMode == 2) ||
8029            (supportedFaceDetectMode == 3)) {
8030        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8031        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8032    }
8033    if (hasBlackRegions) {
8034        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8035        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8036    }
8037    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8038            available_result_keys.array(), available_result_keys.size());
8039
8040    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8041       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8042       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8043       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8044       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8045       ANDROID_SCALER_CROPPING_TYPE,
8046       ANDROID_SYNC_MAX_LATENCY,
8047       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8048       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8049       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8050       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8051       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8052       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8053       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8054       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8055       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8056       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8057       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8058       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8059       ANDROID_LENS_FACING,
8060       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8061       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8062       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8063       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8064       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8065       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8066       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8067       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8068       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8069       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8070       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8071       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8072       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8073       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8074       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8075       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8076       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8077       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8078       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8079       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8080       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8081       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8082       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8083       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8084       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8085       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8086       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8087       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8088       ANDROID_TONEMAP_MAX_CURVE_POINTS,
8089       ANDROID_CONTROL_AVAILABLE_MODES,
8090       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8091       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8092       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8093       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8094       ANDROID_SHADING_AVAILABLE_MODES,
8095       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8096       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
8097
8098    Vector<int32_t> available_characteristics_keys;
8099    available_characteristics_keys.appendArray(characteristics_keys_basic,
8100            sizeof(characteristics_keys_basic)/sizeof(int32_t));
8101    if (hasBlackRegions) {
8102        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8103    }
8104    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
8105                      available_characteristics_keys.array(),
8106                      available_characteristics_keys.size());
8107
8108    /*available stall durations depend on the hw + sw and will be different for different devices */
8109    /*have to add for raw after implementation*/
8110    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8111    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8112
8113    Vector<int64_t> available_stall_durations;
8114    for (uint32_t j = 0; j < stall_formats_count; j++) {
8115        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8116            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8117                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8118                available_stall_durations.add(stall_formats[j]);
8119                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8120                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8121                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8122          }
8123        } else {
8124            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8125                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8126                available_stall_durations.add(stall_formats[j]);
8127                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8128                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8129                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8130            }
8131        }
8132    }
8133    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8134                      available_stall_durations.array(),
8135                      available_stall_durations.size());
8136
8137    //QCAMERA3_OPAQUE_RAW
8138    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8139    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8140    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8141    case LEGACY_RAW:
8142        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8143            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8144        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8145            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8146        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8147            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8148        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8149        break;
8150    case MIPI_RAW:
8151        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8152            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8153        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8154            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8155        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8156            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8157        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8158        break;
8159    default:
8160        LOGE("unknown opaque_raw_format %d",
8161                gCamCapability[cameraId]->opaque_raw_fmt);
8162        break;
8163    }
8164    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8165
8166    Vector<int32_t> strides;
8167    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8168            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8169        cam_stream_buf_plane_info_t buf_planes;
8170        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8171        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8172        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8173            &gCamCapability[cameraId]->padding_info, &buf_planes);
8174        strides.add(buf_planes.plane_info.mp[0].stride);
8175    }
8176    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8177            strides.size());
8178
8179    Vector<int32_t> opaque_size;
8180    for (size_t j = 0; j < scalar_formats_count; j++) {
8181        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8182            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8183                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8184                cam_stream_buf_plane_info_t buf_planes;
8185
8186                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8187                         &gCamCapability[cameraId]->padding_info, &buf_planes);
8188
8189                if (rc == 0) {
8190                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8191                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8192                    opaque_size.add(buf_planes.plane_info.frame_len);
8193                }else {
8194                    LOGE("raw frame calculation failed!");
8195                }
8196            }
8197        }
8198    }
8199
8200    if ((opaque_size.size() > 0) &&
8201            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8202        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8203    else
8204        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8205
8206    gStaticMetadata[cameraId] = staticInfo.release();
8207    return rc;
8208}
8209
8210/*===========================================================================
8211 * FUNCTION   : makeTable
8212 *
8213 * DESCRIPTION: make a table of sizes
8214 *
8215 * PARAMETERS :
8216 *
8217 *
8218 *==========================================================================*/
8219void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8220        size_t max_size, int32_t *sizeTable)
8221{
8222    size_t j = 0;
8223    if (size > max_size) {
8224       size = max_size;
8225    }
8226    for (size_t i = 0; i < size; i++) {
8227        sizeTable[j] = dimTable[i].width;
8228        sizeTable[j+1] = dimTable[i].height;
8229        j+=2;
8230    }
8231}
8232
8233/*===========================================================================
8234 * FUNCTION   : makeFPSTable
8235 *
8236 * DESCRIPTION: make a table of fps ranges
8237 *
8238 * PARAMETERS :
8239 *
8240 *==========================================================================*/
8241void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8242        size_t max_size, int32_t *fpsRangesTable)
8243{
8244    size_t j = 0;
8245    if (size > max_size) {
8246       size = max_size;
8247    }
8248    for (size_t i = 0; i < size; i++) {
8249        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8250        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8251        j+=2;
8252    }
8253}
8254
8255/*===========================================================================
8256 * FUNCTION   : makeOverridesList
8257 *
8258 * DESCRIPTION: make a list of scene mode overrides
8259 *
8260 * PARAMETERS :
8261 *
8262 *
8263 *==========================================================================*/
8264void QCamera3HardwareInterface::makeOverridesList(
8265        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
8266        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
8267{
8268    /*daemon will give a list of overrides for all scene modes.
8269      However we should send the fwk only the overrides for the scene modes
8270      supported by the framework*/
8271    size_t j = 0;
8272    if (size > max_size) {
8273       size = max_size;
8274    }
8275    size_t focus_count = CAM_FOCUS_MODE_MAX;
8276    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
8277            focus_count);
8278    for (size_t i = 0; i < size; i++) {
8279        bool supt = false;
8280        size_t index = supported_indexes[i];
8281        overridesList[j] = gCamCapability[camera_id]->flash_available ?
8282                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
8283        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8284                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8285                overridesTable[index].awb_mode);
8286        if (NAME_NOT_FOUND != val) {
8287            overridesList[j+1] = (uint8_t)val;
8288        }
8289        uint8_t focus_override = overridesTable[index].af_mode;
8290        for (size_t k = 0; k < focus_count; k++) {
8291           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
8292              supt = true;
8293              break;
8294           }
8295        }
8296        if (supt) {
8297            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8298                    focus_override);
8299            if (NAME_NOT_FOUND != val) {
8300                overridesList[j+2] = (uint8_t)val;
8301            }
8302        } else {
8303           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
8304        }
8305        j+=3;
8306    }
8307}
8308
8309/*===========================================================================
8310 * FUNCTION   : filterJpegSizes
8311 *
8312 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8313 *              could be downscaled to
8314 *
8315 * PARAMETERS :
8316 *
8317 * RETURN     : length of jpegSizes array
8318 *==========================================================================*/
8319
8320size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8321        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8322        uint8_t downscale_factor)
8323{
8324    if (0 == downscale_factor) {
8325        downscale_factor = 1;
8326    }
8327
8328    int32_t min_width = active_array_size.width / downscale_factor;
8329    int32_t min_height = active_array_size.height / downscale_factor;
8330    size_t jpegSizesCnt = 0;
8331    if (processedSizesCnt > maxCount) {
8332        processedSizesCnt = maxCount;
8333    }
8334    for (size_t i = 0; i < processedSizesCnt; i+=2) {
8335        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8336            jpegSizes[jpegSizesCnt] = processedSizes[i];
8337            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8338            jpegSizesCnt += 2;
8339        }
8340    }
8341    return jpegSizesCnt;
8342}
8343
8344/*===========================================================================
8345 * FUNCTION   : computeNoiseModelEntryS
8346 *
8347 * DESCRIPTION: function to map a given sensitivity to the S noise
8348 *              model parameters in the DNG noise model.
8349 *
8350 * PARAMETERS : sens : the sensor sensitivity
8351 *
 * RETURN     : S (sensor amplification) noise
8353 *
8354 *==========================================================================*/
8355double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8356    double s = gCamCapability[mCameraId]->gradient_S * sens +
8357            gCamCapability[mCameraId]->offset_S;
8358    return ((s < 0.0) ? 0.0 : s);
8359}
8360
8361/*===========================================================================
8362 * FUNCTION   : computeNoiseModelEntryO
8363 *
8364 * DESCRIPTION: function to map a given sensitivity to the O noise
8365 *              model parameters in the DNG noise model.
8366 *
8367 * PARAMETERS : sens : the sensor sensitivity
8368 *
 * RETURN     : O (sensor readout) noise
8370 *
8371 *==========================================================================*/
8372double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8373    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8374    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8375            1.0 : (1.0 * sens / max_analog_sens);
8376    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8377            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8378    return ((o < 0.0) ? 0.0 : o);
8379}
8380
8381/*===========================================================================
8382 * FUNCTION   : getSensorSensitivity
8383 *
8384 * DESCRIPTION: convert iso_mode to an integer value
8385 *
8386 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8387 *
 * RETURN     : sensitivity supported by sensor
8389 *
8390 *==========================================================================*/
8391int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8392{
8393    int32_t sensitivity;
8394
8395    switch (iso_mode) {
8396    case CAM_ISO_MODE_100:
8397        sensitivity = 100;
8398        break;
8399    case CAM_ISO_MODE_200:
8400        sensitivity = 200;
8401        break;
8402    case CAM_ISO_MODE_400:
8403        sensitivity = 400;
8404        break;
8405    case CAM_ISO_MODE_800:
8406        sensitivity = 800;
8407        break;
8408    case CAM_ISO_MODE_1600:
8409        sensitivity = 1600;
8410        break;
8411    default:
8412        sensitivity = -1;
8413        break;
8414    }
8415    return sensitivity;
8416}
8417
8418/*===========================================================================
8419 * FUNCTION   : getCamInfo
8420 *
8421 * DESCRIPTION: query camera capabilities
8422 *
8423 * PARAMETERS :
8424 *   @cameraId  : camera Id
8425 *   @info      : camera info struct to be filled in with camera capabilities
8426 *
8427 * RETURN     : int type of status
8428 *              NO_ERROR  -- success
8429 *              none-zero failure code
8430 *==========================================================================*/
8431int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
8432        struct camera_info *info)
8433{
8434    ATRACE_CALL();
8435    int rc = 0;
8436
8437    pthread_mutex_lock(&gCamLock);
8438    if (NULL == gCamCapability[cameraId]) {
8439        rc = initCapabilities(cameraId);
8440        if (rc < 0) {
8441            pthread_mutex_unlock(&gCamLock);
8442            return rc;
8443        }
8444    }
8445
8446    if (NULL == gStaticMetadata[cameraId]) {
8447        rc = initStaticMetadata(cameraId);
8448        if (rc < 0) {
8449            pthread_mutex_unlock(&gCamLock);
8450            return rc;
8451        }
8452    }
8453
8454    switch(gCamCapability[cameraId]->position) {
8455    case CAM_POSITION_BACK:
8456        info->facing = CAMERA_FACING_BACK;
8457        break;
8458
8459    case CAM_POSITION_FRONT:
8460        info->facing = CAMERA_FACING_FRONT;
8461        break;
8462
8463    default:
8464        LOGE("Unknown position type for camera id:%d", cameraId);
8465        rc = -1;
8466        break;
8467    }
8468
8469
8470    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
8471    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
8472    info->static_camera_characteristics = gStaticMetadata[cameraId];
8473
8474    //For now assume both cameras can operate independently.
8475    info->conflicting_devices = NULL;
8476    info->conflicting_devices_length = 0;
8477
8478    //resource cost is 100 * MIN(1.0, m/M),
8479    //where m is throughput requirement with maximum stream configuration
8480    //and M is CPP maximum throughput.
8481    float max_fps = 0.0;
8482    for (uint32_t i = 0;
8483            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
8484        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
8485            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
8486    }
8487    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
8488            gCamCapability[cameraId]->active_array_size.width *
8489            gCamCapability[cameraId]->active_array_size.height * max_fps /
8490            gCamCapability[cameraId]->max_pixel_bandwidth;
8491    info->resource_cost = 100 * MIN(1.0, ratio);
8492    LOGI("camera %d resource cost is %d", cameraId,
8493            info->resource_cost);
8494
8495    pthread_mutex_unlock(&gCamLock);
8496    return rc;
8497}
8498
8499/*===========================================================================
8500 * FUNCTION   : translateCapabilityToMetadata
8501 *
8502 * DESCRIPTION: translate the capability into camera_metadata_t
8503 *
8504 * PARAMETERS : type of the request
8505 *
8506 *
8507 * RETURN     : success: camera_metadata_t*
8508 *              failure: NULL
8509 *
8510 *==========================================================================*/
camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
{
    // Templates are built once per request type and cached for the lifetime
    // of the device; later calls return the cached metadata unchanged.
    if (mDefaultMetadata[type] != NULL) {
        return mDefaultMetadata[type];
    }
    //first time we are handling this request
    //fill up the metadata structure using the wrapper class
    CameraMetadata settings;
    //translate from cam_capability_t to camera_metadata_tag_t
    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
    int32_t defaultRequestID = 0;
    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);

    /* OIS disable (persist.camera.ois.disable, default "0") */
    char ois_prop[PROPERTY_VALUE_MAX];
    memset(ois_prop, 0, sizeof(ois_prop));
    property_get("persist.camera.ois.disable", ois_prop, "0");
    uint8_t ois_disable = (uint8_t)atoi(ois_prop);

    /* Force video to use OIS (persist.camera.ois.video, default "1") */
    char videoOisProp[PROPERTY_VALUE_MAX];
    memset(videoOisProp, 0, sizeof(videoOisProp));
    property_get("persist.camera.ois.video", videoOisProp, "1");
    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);

    // EIS enable/disable (persist.camera.eis.enable, default "0")
    char eis_prop[PROPERTY_VALUE_MAX];
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "0");
    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);

    // Hybrid AE enable/disable (persist.camera.hybrid_ae.enable, default "0")
    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);

    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
    // This is a bit hacky. EIS is enabled only when the above setprop
    // is set to non-zero value and on back camera (for 2015 Nexus).
    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
    // configureStream is called before this function. In other words,
    // we cannot guarantee the app will call configureStream before
    // calling createDefaultRequest.
    const bool eisEnabled = facingBack && eis_prop_set;

    // Per-template tuning picked by the switch below: capture intent,
    // AF mode, video/optical stabilization and the processing modes
    // (CAC, edge, noise reduction, tonemap, lens shading map).
    uint8_t controlIntent = 0;
    uint8_t focusMode;
    uint8_t vsMode;
    uint8_t optStabMode;
    uint8_t cacMode;
    uint8_t edge_mode;
    uint8_t noise_red_mode;
    uint8_t tonemap_mode;
    bool highQualityModeEntryAvailable = FALSE;
    bool fastModeEntryAvailable = FALSE;
    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;

    switch (type) {
      case CAMERA3_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
            if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                highQualityModeEntryAvailable = TRUE;
            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                fastModeEntryAvailable = TRUE;
            }
        }
        if (highQualityModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
        } else if (fastModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        }
        // Raw sensors get the lens shading map in still-capture results.
        if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
            shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
        }
        break;
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        if (eisEnabled) {
            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        if (eisEnabled) {
            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_MANUAL:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
      default:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
    }
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
    // A single supported focus mode means the lens is fixed-focus: force AF off.
    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
    }
    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);

    // When the hardware only supports one OIS state, force it; the
    // persist.camera.ois.disable property also forces OIS off.
    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
            || ois_disable)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);

    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
            &gCamCapability[mCameraId]->exposure_compensation_default, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    /*flash*/
    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
    settings.update(ANDROID_FLASH_FIRING_POWER,
            &flashFiringLevel, 1);

    /* lens */
    float default_aperture = gCamCapability[mCameraId]->apertures[0];
    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);

    if (gCamCapability[mCameraId]->filter_densities_count) {
        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
                        gCamCapability[mCameraId]->filter_densities_count);
    }

    float default_focal_length = gCamCapability[mCameraId]->focal_length;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);

    // Fixed-focus / manual AF defaults to infinity focus distance.
    if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
        float default_focus_distance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
    }

    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);

    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);

    /* face detection (default to OFF) */
    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);


    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

    /* Exposure time(Update the Min Exposure Time)*/
    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);

    /* frame duration */
    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);

    /* sensitivity */
    static const int32_t default_sensitivity = 100;
    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
    // NOTE(review): 'static' means this is initialized exactly once per
    // process, latching the min_sensitivity of whichever camera builds a
    // template first; presumably wrong if cameras differ in their ISP
    // sensitivity ranges — confirm whether 'static' is intended here.
    static const int32_t default_isp_sensitivity =
            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);

    /*edge mode*/
    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);

    /*noise reduction mode*/
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);

    /*color correction mode*/
    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);

    /*transform matrix mode*/
    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);

    // Default crop region covers the full active pixel array (no zoom).
    int32_t scaler_crop_region[4];
    scaler_crop_region[0] = 0;
    scaler_crop_region[1] = 0;
    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);

    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);

    /*focus distance*/
    float focus_distance = 0.0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);

    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
    /* Restrict default preview template to max 30 fps */
    float max_range = 0.0;
    float max_fixed_fps = 0.0;
    int32_t fps_range[2] = {0, 0};
    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
            i++) {
        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
                TEMPLATE_MAX_PREVIEW_FPS) {
            continue;
        }
        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
        if (type == CAMERA3_TEMPLATE_PREVIEW ||
                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
            // Picture-like templates: pick the widest (most flexible) range.
            if (range > max_range) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_range = range;
            }
        } else {
            // Video-like templates: pick the highest fixed (min == max) range.
            if (range < 0.01 && max_fixed_fps <
                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
            }
        }
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);

    /*precapture trigger*/
    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);

    /*af trigger*/
    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);

    /* ae & af regions: default to the full active array with zero weight */
    int32_t active_region[] = {
            gCamCapability[mCameraId]->active_array_size.left,
            gCamCapability[mCameraId]->active_array_size.top,
            gCamCapability[mCameraId]->active_array_size.left +
                    gCamCapability[mCameraId]->active_array_size.width,
            gCamCapability[mCameraId]->active_array_size.top +
                    gCamCapability[mCameraId]->active_array_size.height,
            0};
    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));
    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));

    /* black level lock */
    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);

    //special defaults for manual template: all 3A blocks forced off
    if (type == CAMERA3_TEMPLATE_MANUAL) {
        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);

        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);

        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);

        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);

        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);

        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
    }


    /* TNR
     * We'll use this location to determine which modes TNR will be set.
     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
     * This is not to be confused with linking on a per stream basis that decision
     * is still on per-session basis and will be handled as part of config stream
     */
    uint8_t tnr_enable = 0;

    if (m_bTnrPreview || m_bTnrVideo) {

        // Only the video-record template enables TNR by default.
        switch (type) {
            case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    tnr_enable = 1;
                    break;

            default:
                    tnr_enable = 0;
                    break;
        }

        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);

        LOGD("TNR:%d with process plate %d for template:%d",
                             tnr_enable, tnr_process_type, type);
    }

    //Update Link tags to default (standalone, i.e. dual-cam linking off)
    uint8_t sync_type = CAM_TYPE_STANDALONE;
    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);

    uint8_t is_main = 0; //this doesn't matter as app should overwrite
    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);

    uint8_t related_camera_id = mCameraId;
    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);

    /* CDS default: from persist.camera.CDS, falling back to AUTO when the
     * property value does not name a valid mode */
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.CDS", prop, "Auto");
    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
    if (CAM_CDS_MODE_MAX == cds_mode) {
        cds_mode = CAM_CDS_MODE_AUTO;
    }

    /* Disabling CDS in templates which have TNR enabled*/
    if (tnr_enable)
        cds_mode = CAM_CDS_MODE_OFF;

    int32_t mode = cds_mode;
    settings.update(QCAMERA3_CDS_MODE, &mode, 1);

    /* hybrid ae */
    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);

    // Cache the built template; ownership of the released buffer stays with
    // mDefaultMetadata for the device's lifetime.
    mDefaultMetadata[type] = settings.release();

    return mDefaultMetadata[type];
}
8947
8948/*===========================================================================
8949 * FUNCTION   : setFrameParameters
8950 *
8951 * DESCRIPTION: set parameters per frame as requested in the metadata from
8952 *              framework
8953 *
8954 * PARAMETERS :
8955 *   @request   : request that needs to be serviced
8956 *   @streamsArray : Stream ID of all the requested streams
8957 *   @blob_request: Whether this request is a blob request or not
8958 *
8959 * RETURN     : success: NO_ERROR
8960 *              failure:
8961 *==========================================================================*/
8962int QCamera3HardwareInterface::setFrameParameters(
8963                    camera3_capture_request_t *request,
8964                    cam_stream_ID_t streamsArray,
8965                    int blob_request,
8966                    uint32_t snapshotStreamId)
8967{
8968    /*translate from camera_metadata_t type to parm_type_t*/
8969    int rc = 0;
8970    int32_t hal_version = CAM_HAL_V3;
8971
8972    clear_metadata_buffer(mParameters);
8973    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8974        LOGE("Failed to set hal version in the parameters");
8975        return BAD_VALUE;
8976    }
8977
8978    /*we need to update the frame number in the parameters*/
8979    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8980            request->frame_number)) {
8981        LOGE("Failed to set the frame number in the parameters");
8982        return BAD_VALUE;
8983    }
8984
8985    /* Update stream id of all the requested buffers */
8986    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
8987        LOGE("Failed to set stream type mask in the parameters");
8988        return BAD_VALUE;
8989    }
8990
8991    if (mUpdateDebugLevel) {
8992        uint32_t dummyDebugLevel = 0;
8993        /* The value of dummyDebugLevel is irrelavent. On
8994         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8995        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8996                dummyDebugLevel)) {
8997            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8998            return BAD_VALUE;
8999        }
9000        mUpdateDebugLevel = false;
9001    }
9002
9003    if(request->settings != NULL){
9004        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
9005        if (blob_request)
9006            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
9007    }
9008
9009    return rc;
9010}
9011
9012/*===========================================================================
9013 * FUNCTION   : setReprocParameters
9014 *
9015 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9016 *              return it.
9017 *
9018 * PARAMETERS :
9019 *   @request   : request that needs to be serviced
9020 *
9021 * RETURN     : success: NO_ERROR
9022 *              failure:
9023 *==========================================================================*/
9024int32_t QCamera3HardwareInterface::setReprocParameters(
9025        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9026        uint32_t snapshotStreamId)
9027{
9028    /*translate from camera_metadata_t type to parm_type_t*/
9029    int rc = 0;
9030
9031    if (NULL == request->settings){
9032        LOGE("Reprocess settings cannot be NULL");
9033        return BAD_VALUE;
9034    }
9035
9036    if (NULL == reprocParam) {
9037        LOGE("Invalid reprocessing metadata buffer");
9038        return BAD_VALUE;
9039    }
9040    clear_metadata_buffer(reprocParam);
9041
9042    /*we need to update the frame number in the parameters*/
9043    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9044            request->frame_number)) {
9045        LOGE("Failed to set the frame number in the parameters");
9046        return BAD_VALUE;
9047    }
9048
9049    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9050    if (rc < 0) {
9051        LOGE("Failed to translate reproc request");
9052        return rc;
9053    }
9054
9055    CameraMetadata frame_settings;
9056    frame_settings = request->settings;
9057    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9058            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9059        int32_t *crop_count =
9060                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9061        int32_t *crop_data =
9062                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9063        int32_t *roi_map =
9064                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9065        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9066            cam_crop_data_t crop_meta;
9067            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9068            crop_meta.num_of_streams = 1;
9069            crop_meta.crop_info[0].crop.left   = crop_data[0];
9070            crop_meta.crop_info[0].crop.top    = crop_data[1];
9071            crop_meta.crop_info[0].crop.width  = crop_data[2];
9072            crop_meta.crop_info[0].crop.height = crop_data[3];
9073
9074            crop_meta.crop_info[0].roi_map.left =
9075                    roi_map[0];
9076            crop_meta.crop_info[0].roi_map.top =
9077                    roi_map[1];
9078            crop_meta.crop_info[0].roi_map.width =
9079                    roi_map[2];
9080            crop_meta.crop_info[0].roi_map.height =
9081                    roi_map[3];
9082
9083            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9084                rc = BAD_VALUE;
9085            }
9086            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9087                    request->input_buffer->stream,
9088                    crop_meta.crop_info[0].crop.left,
9089                    crop_meta.crop_info[0].crop.top,
9090                    crop_meta.crop_info[0].crop.width,
9091                    crop_meta.crop_info[0].crop.height);
9092            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9093                    request->input_buffer->stream,
9094                    crop_meta.crop_info[0].roi_map.left,
9095                    crop_meta.crop_info[0].roi_map.top,
9096                    crop_meta.crop_info[0].roi_map.width,
9097                    crop_meta.crop_info[0].roi_map.height);
9098            } else {
9099                LOGE("Invalid reprocess crop count %d!", *crop_count);
9100            }
9101    } else {
9102        LOGE("No crop data from matching output stream");
9103    }
9104
9105    /* These settings are not needed for regular requests so handle them specially for
9106       reprocess requests; information needed for EXIF tags */
9107    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9108        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9109                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9110        if (NAME_NOT_FOUND != val) {
9111            uint32_t flashMode = (uint32_t)val;
9112            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9113                rc = BAD_VALUE;
9114            }
9115        } else {
9116            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9117                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9118        }
9119    } else {
9120        LOGH("No flash mode in reprocess settings");
9121    }
9122
9123    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9124        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9125        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9126            rc = BAD_VALUE;
9127        }
9128    } else {
9129        LOGH("No flash state in reprocess settings");
9130    }
9131
9132    return rc;
9133}
9134
9135/*===========================================================================
9136 * FUNCTION   : saveRequestSettings
9137 *
9138 * DESCRIPTION: Add any settings that might have changed to the request settings
9139 *              and save the settings to be applied on the frame
9140 *
9141 * PARAMETERS :
9142 *   @jpegMetadata : the extracted and/or modified jpeg metadata
9143 *   @request      : request with initial settings
9144 *
9145 * RETURN     :
9146 * camera_metadata_t* : pointer to the saved request settings
9147 *==========================================================================*/
9148camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9149        const CameraMetadata &jpegMetadata,
9150        camera3_capture_request_t *request)
9151{
9152    camera_metadata_t *resultMetadata;
9153    CameraMetadata camMetadata;
9154    camMetadata = request->settings;
9155
9156    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9157        int32_t thumbnail_size[2];
9158        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9159        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9160        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9161                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9162    }
9163
9164    resultMetadata = camMetadata.release();
9165    return resultMetadata;
9166}
9167
9168/*===========================================================================
9169 * FUNCTION   : setHalFpsRange
9170 *
9171 * DESCRIPTION: set FPS range parameter
9172 *
9173 *
9174 * PARAMETERS :
9175 *   @settings    : Metadata from framework
9176 *   @hal_metadata: Metadata buffer
9177 *
9178 *
9179 * RETURN     : success: NO_ERROR
9180 *              failure:
9181 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes the caller has already verified that
    // ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists in @settings (the
    // translateToHalMetadata caller does); find() on a missing tag would
    // yield a NULL data pointer here — confirm no other callers exist.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the video fps range mirrors the requested AE target range;
    // both are overridden below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // mBatchSize is recomputed on every call; it stays 0 unless HFR batch
    // mode conditions below are met.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode the sensor runs flat-out: pin the
        // minimums of both ranges to video_max_fps (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Preview fps = sensor fps / batch size, so batch size is the
                // ratio of HFR video fps to the fixed HFR preview fps, capped
                // at MAX_HFR_BATCH_SIZE.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // The (possibly HFR-adjusted) fps range always goes into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9275
9276/*===========================================================================
9277 * FUNCTION   : translateToHalMetadata
9278 *
9279 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9280 *
9281 *
9282 * PARAMETERS :
9283 *   @request  : request sent from framework
9284 *
9285 *
9286 * RETURN     : success: NO_ERROR
9287 *              failure:
9288 *==========================================================================*/
9289int QCamera3HardwareInterface::translateToHalMetadata
9290                                  (const camera3_capture_request_t *request,
9291                                   metadata_buffer_t *hal_metadata,
9292                                   uint32_t snapshotStreamId)
9293{
9294    int rc = 0;
9295    CameraMetadata frame_settings;
9296    frame_settings = request->settings;
9297
9298    /* Do not change the order of the following list unless you know what you are
9299     * doing.
9300     * The order is laid out in such a way that parameters in the front of the table
9301     * may be used to override the parameters later in the table. Examples are:
9302     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
9306     */
9307    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9308        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9309        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9310            rc = BAD_VALUE;
9311        }
9312        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9313        if (rc != NO_ERROR) {
9314            LOGE("extractSceneMode failed");
9315        }
9316    }
9317
9318    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9319        uint8_t fwk_aeMode =
9320            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9321        uint8_t aeMode;
9322        int32_t redeye;
9323
9324        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9325            aeMode = CAM_AE_MODE_OFF;
9326        } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
9327            aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
9328        } else {
9329            aeMode = CAM_AE_MODE_ON;
9330        }
9331        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9332            redeye = 1;
9333        } else {
9334            redeye = 0;
9335        }
9336
9337        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9338                fwk_aeMode);
9339        if (NAME_NOT_FOUND != val) {
9340            int32_t flashMode = (int32_t)val;
9341            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9342        }
9343
9344        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9345        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9346            rc = BAD_VALUE;
9347        }
9348    }
9349
9350    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9351        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9352        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9353                fwk_whiteLevel);
9354        if (NAME_NOT_FOUND != val) {
9355            uint8_t whiteLevel = (uint8_t)val;
9356            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9357                rc = BAD_VALUE;
9358            }
9359        }
9360    }
9361
9362    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9363        uint8_t fwk_cacMode =
9364                frame_settings.find(
9365                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9366        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9367                fwk_cacMode);
9368        if (NAME_NOT_FOUND != val) {
9369            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9370            bool entryAvailable = FALSE;
9371            // Check whether Frameworks set CAC mode is supported in device or not
9372            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9373                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9374                    entryAvailable = TRUE;
9375                    break;
9376                }
9377            }
9378            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9379            // If entry not found then set the device supported mode instead of frameworks mode i.e,
9380            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9381            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9382            if (entryAvailable == FALSE) {
9383                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9384                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9385                } else {
9386                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9387                        // High is not supported and so set the FAST as spec say's underlying
9388                        // device implementation can be the same for both modes.
9389                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9390                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9391                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9392                        // in order to avoid the fps drop due to high quality
9393                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9394                    } else {
9395                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9396                    }
9397                }
9398            }
9399            LOGD("Final cacMode is %d", cacMode);
9400            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9401                rc = BAD_VALUE;
9402            }
9403        } else {
9404            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9405        }
9406    }
9407
9408    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9409        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9410        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9411                fwk_focusMode);
9412        if (NAME_NOT_FOUND != val) {
9413            uint8_t focusMode = (uint8_t)val;
9414            LOGD("set focus mode %d", focusMode);
9415            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9416                rc = BAD_VALUE;
9417            }
9418        }
9419    }
9420
9421    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9422        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9423        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9424                focalDistance)) {
9425            rc = BAD_VALUE;
9426        }
9427    }
9428
9429    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9430        uint8_t fwk_antibandingMode =
9431                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9432        int val = lookupHalName(ANTIBANDING_MODES_MAP,
9433                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9434        if (NAME_NOT_FOUND != val) {
9435            uint32_t hal_antibandingMode = (uint32_t)val;
9436            if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
9437                if (m60HzZone) {
9438                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
9439                } else {
9440                    hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
9441                }
9442            }
9443            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9444                    hal_antibandingMode)) {
9445                rc = BAD_VALUE;
9446            }
9447        }
9448    }
9449
9450    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9451        int32_t expCompensation = frame_settings.find(
9452                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9453        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9454            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9455        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9456            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9457        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9458                expCompensation)) {
9459            rc = BAD_VALUE;
9460        }
9461    }
9462
9463    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9464        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9465        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9466            rc = BAD_VALUE;
9467        }
9468    }
9469    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9470        rc = setHalFpsRange(frame_settings, hal_metadata);
9471        if (rc != NO_ERROR) {
9472            LOGE("setHalFpsRange failed");
9473        }
9474    }
9475
9476    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9477        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9478        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9479            rc = BAD_VALUE;
9480        }
9481    }
9482
9483    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9484        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9485        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9486                fwk_effectMode);
9487        if (NAME_NOT_FOUND != val) {
9488            uint8_t effectMode = (uint8_t)val;
9489            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9490                rc = BAD_VALUE;
9491            }
9492        }
9493    }
9494
9495    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9496        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9497        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9498                colorCorrectMode)) {
9499            rc = BAD_VALUE;
9500        }
9501    }
9502
9503    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9504        cam_color_correct_gains_t colorCorrectGains;
9505        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9506            colorCorrectGains.gains[i] =
9507                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9508        }
9509        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9510                colorCorrectGains)) {
9511            rc = BAD_VALUE;
9512        }
9513    }
9514
9515    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9516        cam_color_correct_matrix_t colorCorrectTransform;
9517        cam_rational_type_t transform_elem;
9518        size_t num = 0;
9519        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9520           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9521              transform_elem.numerator =
9522                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9523              transform_elem.denominator =
9524                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9525              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9526              num++;
9527           }
9528        }
9529        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9530                colorCorrectTransform)) {
9531            rc = BAD_VALUE;
9532        }
9533    }
9534
9535    cam_trigger_t aecTrigger;
9536    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9537    aecTrigger.trigger_id = -1;
9538    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9539        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9540        aecTrigger.trigger =
9541            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9542        aecTrigger.trigger_id =
9543            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9544        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9545                aecTrigger)) {
9546            rc = BAD_VALUE;
9547        }
9548        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9549                aecTrigger.trigger, aecTrigger.trigger_id);
9550    }
9551
9552    /*af_trigger must come with a trigger id*/
9553    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9554        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9555        cam_trigger_t af_trigger;
9556        af_trigger.trigger =
9557            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9558        af_trigger.trigger_id =
9559            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9560        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9561            rc = BAD_VALUE;
9562        }
9563        LOGD("AfTrigger: %d AfTriggerID: %d",
9564                af_trigger.trigger, af_trigger.trigger_id);
9565    }
9566
9567    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9568        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9569        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9570            rc = BAD_VALUE;
9571        }
9572    }
9573    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9574        cam_edge_application_t edge_application;
9575        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9576        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9577            edge_application.sharpness = 0;
9578        } else {
9579            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9580        }
9581        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9582            rc = BAD_VALUE;
9583        }
9584    }
9585
9586    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9587        int32_t respectFlashMode = 1;
9588        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9589            uint8_t fwk_aeMode =
9590                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9591            if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
9592                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
9593                    fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9594                respectFlashMode = 0;
9595                LOGH("AE Mode controls flash, ignore android.flash.mode");
9596            }
9597        }
9598        if (respectFlashMode) {
9599            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9600                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9601            LOGH("flash mode after mapping %d", val);
9602            // To check: CAM_INTF_META_FLASH_MODE usage
9603            if (NAME_NOT_FOUND != val) {
9604                uint8_t flashMode = (uint8_t)val;
9605                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9606                    rc = BAD_VALUE;
9607                }
9608            }
9609        }
9610    }
9611
9612    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9613        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9614        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9615            rc = BAD_VALUE;
9616        }
9617    }
9618
9619    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9620        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9621        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9622                flashFiringTime)) {
9623            rc = BAD_VALUE;
9624        }
9625    }
9626
9627    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9628        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9629        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9630                hotPixelMode)) {
9631            rc = BAD_VALUE;
9632        }
9633    }
9634
9635    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9636        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9637        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9638                lensAperture)) {
9639            rc = BAD_VALUE;
9640        }
9641    }
9642
9643    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9644        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9645        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9646                filterDensity)) {
9647            rc = BAD_VALUE;
9648        }
9649    }
9650
9651    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9652        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9653        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9654                focalLength)) {
9655            rc = BAD_VALUE;
9656        }
9657    }
9658
9659    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9660        uint8_t optStabMode =
9661                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9662        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9663                optStabMode)) {
9664            rc = BAD_VALUE;
9665        }
9666    }
9667
9668    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9669        uint8_t videoStabMode =
9670                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9671        LOGD("videoStabMode from APP = %d", videoStabMode);
9672        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9673                videoStabMode)) {
9674            rc = BAD_VALUE;
9675        }
9676    }
9677
9678
9679    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9680        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9681        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9682                noiseRedMode)) {
9683            rc = BAD_VALUE;
9684        }
9685    }
9686
9687    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9688        float reprocessEffectiveExposureFactor =
9689            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9690        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9691                reprocessEffectiveExposureFactor)) {
9692            rc = BAD_VALUE;
9693        }
9694    }
9695
9696    cam_crop_region_t scalerCropRegion;
9697    bool scalerCropSet = false;
9698    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9699        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9700        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9701        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9702        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9703
9704        // Map coordinate system from active array to sensor output.
9705        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9706                scalerCropRegion.width, scalerCropRegion.height);
9707
9708        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9709                scalerCropRegion)) {
9710            rc = BAD_VALUE;
9711        }
9712        scalerCropSet = true;
9713    }
9714
9715    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9716        int64_t sensorExpTime =
9717                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9718        LOGD("setting sensorExpTime %lld", sensorExpTime);
9719        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9720                sensorExpTime)) {
9721            rc = BAD_VALUE;
9722        }
9723    }
9724
9725    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9726        int64_t sensorFrameDuration =
9727                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9728        int64_t minFrameDuration = getMinFrameDuration(request);
9729        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9730        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9731            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9732        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9733        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9734                sensorFrameDuration)) {
9735            rc = BAD_VALUE;
9736        }
9737    }
9738
9739    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9740        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9741        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9742                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9743        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9744                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9745        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9746        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9747                sensorSensitivity)) {
9748            rc = BAD_VALUE;
9749        }
9750    }
9751
9752    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9753        int32_t ispSensitivity =
9754            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9755        if (ispSensitivity <
9756            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9757                ispSensitivity =
9758                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9759                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9760        }
9761        if (ispSensitivity >
9762            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9763                ispSensitivity =
9764                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9765                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9766        }
9767        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9768                ispSensitivity)) {
9769            rc = BAD_VALUE;
9770        }
9771    }
9772
9773    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9774        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9775        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9776            rc = BAD_VALUE;
9777        }
9778    }
9779
9780    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9781        uint8_t fwk_facedetectMode =
9782                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9783
9784        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9785                fwk_facedetectMode);
9786
9787        if (NAME_NOT_FOUND != val) {
9788            uint8_t facedetectMode = (uint8_t)val;
9789            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9790                    facedetectMode)) {
9791                rc = BAD_VALUE;
9792            }
9793        }
9794    }
9795
9796    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9797        uint8_t histogramMode =
9798                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9799        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9800                histogramMode)) {
9801            rc = BAD_VALUE;
9802        }
9803    }
9804
9805    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9806        uint8_t sharpnessMapMode =
9807                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9808        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9809                sharpnessMapMode)) {
9810            rc = BAD_VALUE;
9811        }
9812    }
9813
9814    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9815        uint8_t tonemapMode =
9816                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9817        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9818            rc = BAD_VALUE;
9819        }
9820    }
9821    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9822    /*All tonemap channels will have the same number of points*/
9823    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9824        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9825        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9826        cam_rgb_tonemap_curves tonemapCurves;
9827        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9828        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9829            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9830                     tonemapCurves.tonemap_points_cnt,
9831                    CAM_MAX_TONEMAP_CURVE_SIZE);
9832            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9833        }
9834
9835        /* ch0 = G*/
9836        size_t point = 0;
9837        cam_tonemap_curve_t tonemapCurveGreen;
9838        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9839            for (size_t j = 0; j < 2; j++) {
9840               tonemapCurveGreen.tonemap_points[i][j] =
9841                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9842               point++;
9843            }
9844        }
9845        tonemapCurves.curves[0] = tonemapCurveGreen;
9846
9847        /* ch 1 = B */
9848        point = 0;
9849        cam_tonemap_curve_t tonemapCurveBlue;
9850        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9851            for (size_t j = 0; j < 2; j++) {
9852               tonemapCurveBlue.tonemap_points[i][j] =
9853                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9854               point++;
9855            }
9856        }
9857        tonemapCurves.curves[1] = tonemapCurveBlue;
9858
9859        /* ch 2 = R */
9860        point = 0;
9861        cam_tonemap_curve_t tonemapCurveRed;
9862        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9863            for (size_t j = 0; j < 2; j++) {
9864               tonemapCurveRed.tonemap_points[i][j] =
9865                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9866               point++;
9867            }
9868        }
9869        tonemapCurves.curves[2] = tonemapCurveRed;
9870
9871        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9872                tonemapCurves)) {
9873            rc = BAD_VALUE;
9874        }
9875    }
9876
9877    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9878        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9879        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9880                captureIntent)) {
9881            rc = BAD_VALUE;
9882        }
9883    }
9884
9885    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9886        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9887        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9888                blackLevelLock)) {
9889            rc = BAD_VALUE;
9890        }
9891    }
9892
9893    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9894        uint8_t lensShadingMapMode =
9895                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9896        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9897                lensShadingMapMode)) {
9898            rc = BAD_VALUE;
9899        }
9900    }
9901
9902    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9903        cam_area_t roi;
9904        bool reset = true;
9905        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9906
9907        // Map coordinate system from active array to sensor output.
9908        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9909                roi.rect.height);
9910
9911        if (scalerCropSet) {
9912            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9913        }
9914        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9915            rc = BAD_VALUE;
9916        }
9917    }
9918
9919    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9920        cam_area_t roi;
9921        bool reset = true;
9922        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9923
9924        // Map coordinate system from active array to sensor output.
9925        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9926                roi.rect.height);
9927
9928        if (scalerCropSet) {
9929            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9930        }
9931        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9932            rc = BAD_VALUE;
9933        }
9934    }
9935
9936    // CDS for non-HFR non-video mode
9937    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9938            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9939        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9940        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9941            LOGE("Invalid CDS mode %d!", *fwk_cds);
9942        } else {
9943            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9944                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9945                rc = BAD_VALUE;
9946            }
9947        }
9948    }
9949
9950    // TNR
9951    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9952        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9953        uint8_t b_TnrRequested = 0;
9954        cam_denoise_param_t tnr;
9955        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9956        tnr.process_plates =
9957            (cam_denoise_process_type_t)frame_settings.find(
9958            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9959        b_TnrRequested = tnr.denoise_enable;
9960        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9961            rc = BAD_VALUE;
9962        }
9963    }
9964
9965    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9966        int32_t fwk_testPatternMode =
9967                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9968        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9969                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9970
9971        if (NAME_NOT_FOUND != testPatternMode) {
9972            cam_test_pattern_data_t testPatternData;
9973            memset(&testPatternData, 0, sizeof(testPatternData));
9974            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9975            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9976                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9977                int32_t *fwk_testPatternData =
9978                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9979                testPatternData.r = fwk_testPatternData[0];
9980                testPatternData.b = fwk_testPatternData[3];
9981                switch (gCamCapability[mCameraId]->color_arrangement) {
9982                    case CAM_FILTER_ARRANGEMENT_RGGB:
9983                    case CAM_FILTER_ARRANGEMENT_GRBG:
9984                        testPatternData.gr = fwk_testPatternData[1];
9985                        testPatternData.gb = fwk_testPatternData[2];
9986                        break;
9987                    case CAM_FILTER_ARRANGEMENT_GBRG:
9988                    case CAM_FILTER_ARRANGEMENT_BGGR:
9989                        testPatternData.gr = fwk_testPatternData[2];
9990                        testPatternData.gb = fwk_testPatternData[1];
9991                        break;
9992                    default:
9993                        LOGE("color arrangement %d is not supported",
9994                                gCamCapability[mCameraId]->color_arrangement);
9995                        break;
9996                }
9997            }
9998            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9999                    testPatternData)) {
10000                rc = BAD_VALUE;
10001            }
10002        } else {
10003            LOGE("Invalid framework sensor test pattern mode %d",
10004                    fwk_testPatternMode);
10005        }
10006    }
10007
10008    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
10009        size_t count = 0;
10010        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
10011        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
10012                gps_coords.data.d, gps_coords.count, count);
10013        if (gps_coords.count != count) {
10014            rc = BAD_VALUE;
10015        }
10016    }
10017
10018    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
10019        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
10020        size_t count = 0;
10021        const char *gps_methods_src = (const char *)
10022                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
10023        memset(gps_methods, '\0', sizeof(gps_methods));
10024        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
10025        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
10026                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
10027        if (GPS_PROCESSING_METHOD_SIZE != count) {
10028            rc = BAD_VALUE;
10029        }
10030    }
10031
10032    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
10033        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
10034        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
10035                gps_timestamp)) {
10036            rc = BAD_VALUE;
10037        }
10038    }
10039
10040    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
10041        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
10042        cam_rotation_info_t rotation_info;
10043        if (orientation == 0) {
10044           rotation_info.rotation = ROTATE_0;
10045        } else if (orientation == 90) {
10046           rotation_info.rotation = ROTATE_90;
10047        } else if (orientation == 180) {
10048           rotation_info.rotation = ROTATE_180;
10049        } else if (orientation == 270) {
10050           rotation_info.rotation = ROTATE_270;
10051        }
10052        rotation_info.device_rotation = ROTATE_0;
10053        rotation_info.streamId = snapshotStreamId;
10054        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
10055        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
10056            rc = BAD_VALUE;
10057        }
10058    }
10059
10060    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
10061        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
10062        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
10063            rc = BAD_VALUE;
10064        }
10065    }
10066
10067    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
10068        uint32_t thumb_quality = (uint32_t)
10069                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
10070        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
10071                thumb_quality)) {
10072            rc = BAD_VALUE;
10073        }
10074    }
10075
10076    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10077        cam_dimension_t dim;
10078        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10079        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10080        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
10081            rc = BAD_VALUE;
10082        }
10083    }
10084
10085    // Internal metadata
10086    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
10087        size_t count = 0;
10088        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
10089        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
10090                privatedata.data.i32, privatedata.count, count);
10091        if (privatedata.count != count) {
10092            rc = BAD_VALUE;
10093        }
10094    }
10095
10096    // EV step
10097    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
10098            gCamCapability[mCameraId]->exp_compensation_step)) {
10099        rc = BAD_VALUE;
10100    }
10101
10102    // CDS info
10103    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
10104        cam_cds_data_t *cdsData = (cam_cds_data_t *)
10105                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
10106
10107        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10108                CAM_INTF_META_CDS_DATA, *cdsData)) {
10109            rc = BAD_VALUE;
10110        }
10111    }
10112
10113    // Hybrid AE
10114    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
10115        uint8_t *hybrid_ae = (uint8_t *)
10116                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
10117
10118        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10119                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
10120            rc = BAD_VALUE;
10121        }
10122    }
10123
10124    return rc;
10125}
10126
10127/*===========================================================================
10128 * FUNCTION   : captureResultCb
10129 *
10130 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
10131 *
10132 * PARAMETERS :
10133 *   @frame  : frame information from mm-camera-interface
10134 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
10135 *   @userdata: userdata
10136 *
10137 * RETURN     : NONE
10138 *==========================================================================*/
10139void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
10140                camera3_stream_buffer_t *buffer,
10141                uint32_t frame_number, bool isInputBuffer, void *userdata)
10142{
10143    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10144    if (hw == NULL) {
10145        LOGE("Invalid hw %p", hw);
10146        return;
10147    }
10148
10149    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10150    return;
10151}
10152
10153/*===========================================================================
10154 * FUNCTION   : setBufferErrorStatus
10155 *
10156 * DESCRIPTION: Callback handler for channels to report any buffer errors
10157 *
10158 * PARAMETERS :
10159 *   @ch     : Channel on which buffer error is reported from
10160 *   @frame_number  : frame number on which buffer error is reported on
10161 *   @buffer_status : buffer error status
10162 *   @userdata: userdata
10163 *
10164 * RETURN     : NONE
10165 *==========================================================================*/
10166void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10167                uint32_t frame_number, camera3_buffer_status_t err,
10168                void *userdata)
10169{
10170    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10171    if (hw == NULL) {
10172        LOGE("Invalid hw %p", hw);
10173        return;
10174    }
10175
10176    hw->setBufferErrorStatus(ch, frame_number, err);
10177    return;
10178}
10179
10180void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10181                uint32_t frameNumber, camera3_buffer_status_t err)
10182{
10183    LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
10184    pthread_mutex_lock(&mMutex);
10185
10186    for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
10187        if (req.frame_number != frameNumber)
10188            continue;
10189        for (auto& k : req.mPendingBufferList) {
10190            if(k.stream->priv == ch) {
10191                k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
10192            }
10193        }
10194    }
10195
10196    pthread_mutex_unlock(&mMutex);
10197    return;
10198}
10199/*===========================================================================
10200 * FUNCTION   : initialize
10201 *
10202 * DESCRIPTION: Pass framework callback pointers to HAL
10203 *
10204 * PARAMETERS :
10205 *
10206 *
10207 * RETURN     : Success : 0
10208 *              Failure: -ENODEV
10209 *==========================================================================*/
10210
10211int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10212                                  const camera3_callback_ops_t *callback_ops)
10213{
10214    LOGD("E");
10215    QCamera3HardwareInterface *hw =
10216        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10217    if (!hw) {
10218        LOGE("NULL camera device");
10219        return -ENODEV;
10220    }
10221
10222    int rc = hw->initialize(callback_ops);
10223    LOGD("X");
10224    return rc;
10225}
10226
10227/*===========================================================================
10228 * FUNCTION   : configure_streams
10229 *
10230 * DESCRIPTION:
10231 *
10232 * PARAMETERS :
10233 *
10234 *
10235 * RETURN     : Success: 0
10236 *              Failure: -EINVAL (if stream configuration is invalid)
10237 *                       -ENODEV (fatal error)
10238 *==========================================================================*/
10239
10240int QCamera3HardwareInterface::configure_streams(
10241        const struct camera3_device *device,
10242        camera3_stream_configuration_t *stream_list)
10243{
10244    LOGD("E");
10245    QCamera3HardwareInterface *hw =
10246        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10247    if (!hw) {
10248        LOGE("NULL camera device");
10249        return -ENODEV;
10250    }
10251    int rc = hw->configureStreams(stream_list);
10252    LOGD("X");
10253    return rc;
10254}
10255
10256/*===========================================================================
10257 * FUNCTION   : construct_default_request_settings
10258 *
10259 * DESCRIPTION: Configure a settings buffer to meet the required use case
10260 *
10261 * PARAMETERS :
10262 *
10263 *
10264 * RETURN     : Success: Return valid metadata
10265 *              Failure: Return NULL
10266 *==========================================================================*/
10267const camera_metadata_t* QCamera3HardwareInterface::
10268    construct_default_request_settings(const struct camera3_device *device,
10269                                        int type)
10270{
10271
10272    LOGD("E");
10273    camera_metadata_t* fwk_metadata = NULL;
10274    QCamera3HardwareInterface *hw =
10275        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10276    if (!hw) {
10277        LOGE("NULL camera device");
10278        return NULL;
10279    }
10280
10281    fwk_metadata = hw->translateCapabilityToMetadata(type);
10282
10283    LOGD("X");
10284    return fwk_metadata;
10285}
10286
10287/*===========================================================================
10288 * FUNCTION   : process_capture_request
10289 *
10290 * DESCRIPTION:
10291 *
10292 * PARAMETERS :
10293 *
10294 *
10295 * RETURN     :
10296 *==========================================================================*/
10297int QCamera3HardwareInterface::process_capture_request(
10298                    const struct camera3_device *device,
10299                    camera3_capture_request_t *request)
10300{
10301    LOGD("E");
10302    QCamera3HardwareInterface *hw =
10303        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10304    if (!hw) {
10305        LOGE("NULL camera device");
10306        return -EINVAL;
10307    }
10308
10309    int rc = hw->processCaptureRequest(request);
10310    LOGD("X");
10311    return rc;
10312}
10313
10314/*===========================================================================
10315 * FUNCTION   : dump
10316 *
10317 * DESCRIPTION:
10318 *
10319 * PARAMETERS :
10320 *
10321 *
10322 * RETURN     :
10323 *==========================================================================*/
10324
10325void QCamera3HardwareInterface::dump(
10326                const struct camera3_device *device, int fd)
10327{
10328    /* Log level property is read when "adb shell dumpsys media.camera" is
10329       called so that the log level can be controlled without restarting
10330       the media server */
10331    getLogLevel();
10332
10333    LOGD("E");
10334    QCamera3HardwareInterface *hw =
10335        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10336    if (!hw) {
10337        LOGE("NULL camera device");
10338        return;
10339    }
10340
10341    hw->dump(fd);
10342    LOGD("X");
10343    return;
10344}
10345
10346/*===========================================================================
10347 * FUNCTION   : flush
10348 *
10349 * DESCRIPTION:
10350 *
10351 * PARAMETERS :
10352 *
10353 *
10354 * RETURN     :
10355 *==========================================================================*/
10356
10357int QCamera3HardwareInterface::flush(
10358                const struct camera3_device *device)
10359{
10360    int rc;
10361    LOGD("E");
10362    QCamera3HardwareInterface *hw =
10363        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10364    if (!hw) {
10365        LOGE("NULL camera device");
10366        return -EINVAL;
10367    }
10368
10369    pthread_mutex_lock(&hw->mMutex);
10370    // Validate current state
10371    switch (hw->mState) {
10372        case STARTED:
10373            /* valid state */
10374            break;
10375
10376        case ERROR:
10377            pthread_mutex_unlock(&hw->mMutex);
10378            hw->handleCameraDeviceError();
10379            return -ENODEV;
10380
10381        default:
10382            LOGI("Flush returned during state %d", hw->mState);
10383            pthread_mutex_unlock(&hw->mMutex);
10384            return 0;
10385    }
10386    pthread_mutex_unlock(&hw->mMutex);
10387
10388    rc = hw->flush(true /* restart channels */ );
10389    LOGD("X");
10390    return rc;
10391}
10392
10393/*===========================================================================
10394 * FUNCTION   : close_camera_device
10395 *
10396 * DESCRIPTION:
10397 *
10398 * PARAMETERS :
10399 *
10400 *
10401 * RETURN     :
10402 *==========================================================================*/
10403int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10404{
10405    int ret = NO_ERROR;
10406    QCamera3HardwareInterface *hw =
10407        reinterpret_cast<QCamera3HardwareInterface *>(
10408            reinterpret_cast<camera3_device_t *>(device)->priv);
10409    if (!hw) {
10410        LOGE("NULL camera device");
10411        return BAD_VALUE;
10412    }
10413
10414    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10415    delete hw;
10416    LOGI("[KPI Perf]: X");
10417    return ret;
10418}
10419
10420/*===========================================================================
10421 * FUNCTION   : getWaveletDenoiseProcessPlate
10422 *
10423 * DESCRIPTION: query wavelet denoise process plate
10424 *
10425 * PARAMETERS : None
10426 *
10427 * RETURN     : WNR prcocess plate value
10428 *==========================================================================*/
10429cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10430{
10431    char prop[PROPERTY_VALUE_MAX];
10432    memset(prop, 0, sizeof(prop));
10433    property_get("persist.denoise.process.plates", prop, "0");
10434    int processPlate = atoi(prop);
10435    switch(processPlate) {
10436    case 0:
10437        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10438    case 1:
10439        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10440    case 2:
10441        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10442    case 3:
10443        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10444    default:
10445        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10446    }
10447}
10448
10449
10450/*===========================================================================
10451 * FUNCTION   : getTemporalDenoiseProcessPlate
10452 *
10453 * DESCRIPTION: query temporal denoise process plate
10454 *
10455 * PARAMETERS : None
10456 *
 * RETURN     : TNR process plate value
10458 *==========================================================================*/
10459cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10460{
10461    char prop[PROPERTY_VALUE_MAX];
10462    memset(prop, 0, sizeof(prop));
10463    property_get("persist.tnr.process.plates", prop, "0");
10464    int processPlate = atoi(prop);
10465    switch(processPlate) {
10466    case 0:
10467        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10468    case 1:
10469        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10470    case 2:
10471        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10472    case 3:
10473        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10474    default:
10475        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10476    }
10477}
10478
10479
10480/*===========================================================================
10481 * FUNCTION   : extractSceneMode
10482 *
10483 * DESCRIPTION: Extract scene mode from frameworks set metadata
10484 *
10485 * PARAMETERS :
10486 *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
10488 *      @hal_metadata: hal metadata structure
10489 *
10490 * RETURN     : None
10491 *==========================================================================*/
10492int32_t QCamera3HardwareInterface::extractSceneMode(
10493        const CameraMetadata &frame_settings, uint8_t metaMode,
10494        metadata_buffer_t *hal_metadata)
10495{
10496    int32_t rc = NO_ERROR;
10497
10498    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10499        camera_metadata_ro_entry entry =
10500                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10501        if (0 == entry.count)
10502            return rc;
10503
10504        uint8_t fwk_sceneMode = entry.data.u8[0];
10505
10506        int val = lookupHalName(SCENE_MODES_MAP,
10507                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10508                fwk_sceneMode);
10509        if (NAME_NOT_FOUND != val) {
10510            uint8_t sceneMode = (uint8_t)val;
10511            LOGD("sceneMode: %d", sceneMode);
10512            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10513                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10514                rc = BAD_VALUE;
10515            }
10516        }
10517    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10518            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10519        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10520        LOGD("sceneMode: %d", sceneMode);
10521        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10522                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10523            rc = BAD_VALUE;
10524        }
10525    }
10526    return rc;
10527}
10528
10529/*===========================================================================
10530 * FUNCTION   : needRotationReprocess
10531 *
10532 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10533 *
10534 * PARAMETERS : none
10535 *
10536 * RETURN     : true: needed
10537 *              false: no need
10538 *==========================================================================*/
10539bool QCamera3HardwareInterface::needRotationReprocess()
10540{
10541    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10542        // current rotation is not zero, and pp has the capability to process rotation
10543        LOGH("need do reprocess for rotation");
10544        return true;
10545    }
10546
10547    return false;
10548}
10549
10550/*===========================================================================
10551 * FUNCTION   : needReprocess
10552 *
 * DESCRIPTION: if reprocess is needed
10554 *
10555 * PARAMETERS : none
10556 *
10557 * RETURN     : true: needed
10558 *              false: no need
10559 *==========================================================================*/
10560bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10561{
10562    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10563        // TODO: add for ZSL HDR later
10564        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10565        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10566            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10567            return true;
10568        } else {
10569            LOGH("already post processed frame");
10570            return false;
10571        }
10572    }
10573    return needRotationReprocess();
10574}
10575
10576/*===========================================================================
10577 * FUNCTION   : needJpegExifRotation
10578 *
10579 * DESCRIPTION: if rotation from jpeg is needed
10580 *
10581 * PARAMETERS : none
10582 *
10583 * RETURN     : true: needed
10584 *              false: no need
10585 *==========================================================================*/
10586bool QCamera3HardwareInterface::needJpegExifRotation()
10587{
10588   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10589    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10590       LOGD("Need use Jpeg EXIF Rotation");
10591       return true;
10592    }
10593    return false;
10594}
10595
10596/*===========================================================================
10597 * FUNCTION   : addOfflineReprocChannel
10598 *
10599 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10600 *              coming from input channel
10601 *
10602 * PARAMETERS :
10603 *   @config  : reprocess configuration
10604 *   @inputChHandle : pointer to the input (source) channel
10605 *
10606 *
10607 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10608 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the channel with no extra feature mask; the actual pp feature
    // set is attached below via addReprocStreamsFromSource().
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Start from the full HAL3 superset of post-processing features, then
    // trim/extend according to hardware capability.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the pp hardware cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
10656
10657/*===========================================================================
10658 * FUNCTION   : getMobicatMask
10659 *
10660 * DESCRIPTION: returns mobicat mask
10661 *
10662 * PARAMETERS : none
10663 *
10664 * RETURN     : mobicat mask
10665 *
10666 *==========================================================================*/
10667uint8_t QCamera3HardwareInterface::getMobicatMask()
10668{
10669    return m_MobicatMask;
10670}
10671
10672/*===========================================================================
10673 * FUNCTION   : setMobicat
10674 *
10675 * DESCRIPTION: set Mobicat on/off.
10676 *
10677 * PARAMETERS :
10678 *   @params  : none
10679 *
10680 * RETURN     : int32_t type of status
10681 *              NO_ERROR  -- success
10682 *              none-zero failure code
10683 *==========================================================================*/
10684int32_t QCamera3HardwareInterface::setMobicat()
10685{
10686    char value [PROPERTY_VALUE_MAX];
10687    property_get("persist.camera.mobicat", value, "0");
10688    int32_t ret = NO_ERROR;
10689    uint8_t enableMobi = (uint8_t)atoi(value);
10690
10691    if (enableMobi) {
10692        tune_cmd_t tune_cmd;
10693        tune_cmd.type = SET_RELOAD_CHROMATIX;
10694        tune_cmd.module = MODULE_ALL;
10695        tune_cmd.value = TRUE;
10696        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10697                CAM_INTF_PARM_SET_VFE_COMMAND,
10698                tune_cmd);
10699
10700        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10701                CAM_INTF_PARM_SET_PP_COMMAND,
10702                tune_cmd);
10703    }
10704    m_MobicatMask = enableMobi;
10705
10706    return ret;
10707}
10708
10709/*===========================================================================
10710* FUNCTION   : getLogLevel
10711*
10712* DESCRIPTION: Reads the log level property into a variable
10713*
10714* PARAMETERS :
10715*   None
10716*
10717* RETURN     :
10718*   None
10719*==========================================================================*/
10720void QCamera3HardwareInterface::getLogLevel()
10721{
10722    char prop[PROPERTY_VALUE_MAX];
10723    uint32_t globalLogLevel = 0;
10724
10725    property_get("persist.camera.hal.debug", prop, "0");
10726    int val = atoi(prop);
10727    if (0 <= val) {
10728        gCamHal3LogLevel = (uint32_t)val;
10729    }
10730
10731    property_get("persist.camera.kpi.debug", prop, "1");
10732    gKpiDebugLevel = atoi(prop);
10733
10734    property_get("persist.camera.global.debug", prop, "0");
10735    val = atoi(prop);
10736    if (0 <= val) {
10737        globalLogLevel = (uint32_t)val;
10738    }
10739
10740    /* Highest log level among hal.logs and global.logs is selected */
10741    if (gCamHal3LogLevel < globalLogLevel)
10742        gCamHal3LogLevel = globalLogLevel;
10743
10744    return;
10745}
10746
10747/*===========================================================================
10748 * FUNCTION   : validateStreamRotations
10749 *
10750 * DESCRIPTION: Check if the rotations requested are supported
10751 *
10752 * PARAMETERS :
10753 *   @stream_list : streams to be configured
10754 *
10755 * RETURN     : NO_ERROR on success
10756 *              -EINVAL on failure
10757 *
10758 *==========================================================================*/
10759int QCamera3HardwareInterface::validateStreamRotations(
10760        camera3_stream_configuration_t *streamList)
10761{
10762    int rc = NO_ERROR;
10763
10764    /*
10765    * Loop through all streams requested in configuration
10766    * Check if unsupported rotations have been requested on any of them
10767    */
10768    for (size_t j = 0; j < streamList->num_streams; j++){
10769        camera3_stream_t *newStream = streamList->streams[j];
10770
10771        switch(newStream->rotation) {
10772            case CAMERA3_STREAM_ROTATION_0:
10773            case CAMERA3_STREAM_ROTATION_90:
10774            case CAMERA3_STREAM_ROTATION_180:
10775            case CAMERA3_STREAM_ROTATION_270:
10776                //Expected values
10777                break;
10778            default:
10779                LOGE("Error: Unsupported rotation of %d requested for stream"
10780                        "type:%d and stream format:%d",
10781                        newStream->rotation, newStream->stream_type,
10782                        newStream->format);
10783                return -EINVAL;
10784        }
10785
10786        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10787        bool isImplDef = (newStream->format ==
10788                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10789        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10790                isImplDef);
10791
10792        if (isRotated && (!isImplDef || isZsl)) {
10793            LOGE("Error: Unsupported rotation of %d requested for stream"
10794                    "type:%d and stream format:%d",
10795                    newStream->rotation, newStream->stream_type,
10796                    newStream->format);
10797            rc = -EINVAL;
10798            break;
10799        }
10800    }
10801
10802    return rc;
10803}
10804
10805/*===========================================================================
10806* FUNCTION   : getFlashInfo
10807*
10808* DESCRIPTION: Retrieve information about whether the device has a flash.
10809*
10810* PARAMETERS :
10811*   @cameraId  : Camera id to query
10812*   @hasFlash  : Boolean indicating whether there is a flash device
10813*                associated with given camera
10814*   @flashNode : If a flash device exists, this will be its device node.
10815*
10816* RETURN     :
10817*   None
10818*==========================================================================*/
10819void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10820        bool& hasFlash,
10821        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10822{
10823    cam_capability_t* camCapability = gCamCapability[cameraId];
10824    if (NULL == camCapability) {
10825        hasFlash = false;
10826        flashNode[0] = '\0';
10827    } else {
10828        hasFlash = camCapability->flash_available;
10829        strlcpy(flashNode,
10830                (char*)camCapability->flash_dev_name,
10831                QCAMERA_MAX_FILEPATH_LENGTH);
10832    }
10833}
10834
10835/*===========================================================================
10836* FUNCTION   : getEepromVersionInfo
10837*
10838* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10839*
10840* PARAMETERS : None
10841*
10842* RETURN     : string describing EEPROM version
10843*              "\0" if no such info available
10844*==========================================================================*/
10845const char *QCamera3HardwareInterface::getEepromVersionInfo()
10846{
10847    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
10848}
10849
10850/*===========================================================================
10851* FUNCTION   : getLdafCalib
10852*
10853* DESCRIPTION: Retrieve Laser AF calibration data
10854*
10855* PARAMETERS : None
10856*
10857* RETURN     : Two uint32_t describing laser AF calibration data
10858*              NULL if none is available.
10859*==========================================================================*/
10860const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10861{
10862    if (mLdafCalibExist) {
10863        return &mLdafCalib[0];
10864    } else {
10865        return NULL;
10866    }
10867}
10868
10869/*===========================================================================
10870 * FUNCTION   : dynamicUpdateMetaStreamInfo
10871 *
10872 * DESCRIPTION: This function:
10873 *             (1) stops all the channels
10874 *             (2) returns error on pending requests and buffers
10875 *             (3) sends metastream_info in setparams
10876 *             (4) starts all channels
10877 *             This is useful when sensor has to be restarted to apply any
10878 *             settings such as frame rate from a different sensor mode
10879 *
10880 * PARAMETERS : None
10881 *
10882 * RETURN     : NO_ERROR on success
10883 *              Error codes on failure
10884 *
10885 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off every channel so the sensor can be reconfigured.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: flush pending requests/buffers back to the framework as
    // errors; they cannot complete across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the current stream configuration being re-sent.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // NOTE(review): failure here is logged but not returned -- channels
        // are still restarted below; presumably deliberate best-effort.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 3: stream-on all channels again with the (re)applied settings.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
10933
10934/*===========================================================================
10935 * FUNCTION   : stopAllChannels
10936 *
10937 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10938 *
10939 * PARAMETERS : None
10940 *
10941 * RETURN     : NO_ERROR on success
10942 *              Error codes on failure
10943 *
10944 *==========================================================================*/
10945int32_t QCamera3HardwareInterface::stopAllChannels()
10946{
10947    int32_t rc = NO_ERROR;
10948
10949    LOGD("Stopping all channels");
10950    // Stop the Streams/Channels
10951    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10952        it != mStreamInfo.end(); it++) {
10953        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10954        if (channel) {
10955            channel->stop();
10956        }
10957        (*it)->status = INVALID;
10958    }
10959
10960    if (mSupportChannel) {
10961        mSupportChannel->stop();
10962    }
10963    if (mAnalysisChannel) {
10964        mAnalysisChannel->stop();
10965    }
10966    if (mRawDumpChannel) {
10967        mRawDumpChannel->stop();
10968    }
10969    if (mMetadataChannel) {
10970        /* If content of mStreamInfo is not 0, there is metadata stream */
10971        mMetadataChannel->stop();
10972    }
10973
10974    LOGD("All channels stopped");
10975    return rc;
10976}
10977
10978/*===========================================================================
10979 * FUNCTION   : startAllChannels
10980 *
10981 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10982 *
10983 * PARAMETERS : None
10984 *
10985 * RETURN     : NO_ERROR on success
10986 *              Error codes on failure
10987 *
10988 *==========================================================================*/
int32_t QCamera3HardwareInterface::startAllChannels()
{
    int32_t rc = NO_ERROR;

    LOGD("Start all channels ");
    // Start the Streams/Channels
    // Metadata channel is started first, before any image stream.
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        rc = mMetadataChannel->start();
        if (rc < 0) {
            LOGE("META channel start failed");
            return rc;
        }
    }
    // Then every app-visible stream channel; abort on first failure.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel) {
            rc = channel->start();
            if (rc < 0) {
                LOGE("channel start failed");
                return rc;
            }
        }
    }
    if (mAnalysisChannel) {
        // NOTE(review): start() return value is ignored here, unlike every
        // other channel below -- confirm whether analysis-channel start
        // failure is intentionally treated as benign.
        mAnalysisChannel->start();
    }
    if (mSupportChannel) {
        rc = mSupportChannel->start();
        if (rc < 0) {
            LOGE("Support channel start failed");
            return rc;
        }
    }
    if (mRawDumpChannel) {
        rc = mRawDumpChannel->start();
        if (rc < 0) {
            LOGE("RAW dump channel start failed");
            return rc;
        }
    }

    LOGD("All channels started");
    return rc;
}
11035
11036/*===========================================================================
11037 * FUNCTION   : notifyErrorForPendingRequests
11038 *
11039 * DESCRIPTION: This function sends error for all the pending requests/buffers
11040 *
11041 * PARAMETERS : None
11042 *
11043 * RETURN     : Error codes
11044 *              NO_ERROR on success
11045 *
11046 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the oldest frame still on the pending-request list; any
    // tracked buffer older than it has already had its metadata delivered.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    // Walk every tracked request; entries are erased in-loop, so the
    // iterator is advanced only via the erase() return value.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One CAMERA3_MSG_ERROR_BUFFER notify per buffer, and the buffer
            // itself is returned with CAMERA3_BUFFER_STATUS_ERROR.
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            LOGE("Sending ERROR REQUEST for all pending requests");
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            // A single CAMERA3_MSG_ERROR_REQUEST notify covers the whole
            // request (no metadata will follow).
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            // NOTE(review): i->input_buffer is read from the head of
            // mPendingRequestsList, which is assumed to correspond to this
            // req's frame number -- confirm the two lists stay in lockstep.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive: clear any per-request buffer lists that might remain, then
    // drop the whole tracking map and reprocess-result list.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
11185
11186bool QCamera3HardwareInterface::isOnEncoder(
11187        const cam_dimension_t max_viewfinder_size,
11188        uint32_t width, uint32_t height)
11189{
11190    return (width > (uint32_t)max_viewfinder_size.width ||
11191            height > (uint32_t)max_viewfinder_size.height);
11192}
11193
11194/*===========================================================================
11195 * FUNCTION   : setBundleInfo
11196 *
11197 * DESCRIPTION: Set bundle info for all streams that are bundle.
11198 *
11199 * PARAMETERS : None
11200 *
11201 * RETURN     : NO_ERROR on success
11202 *              Error codes on failure
11203 *==========================================================================*/
11204int32_t QCamera3HardwareInterface::setBundleInfo()
11205{
11206    int32_t rc = NO_ERROR;
11207
11208    if (mChannelHandle) {
11209        cam_bundle_config_t bundleInfo;
11210        memset(&bundleInfo, 0, sizeof(bundleInfo));
11211        rc = mCameraHandle->ops->get_bundle_info(
11212                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
11213        if (rc != NO_ERROR) {
11214            LOGE("get_bundle_info failed");
11215            return rc;
11216        }
11217        if (mAnalysisChannel) {
11218            mAnalysisChannel->setBundleInfo(bundleInfo);
11219        }
11220        if (mSupportChannel) {
11221            mSupportChannel->setBundleInfo(bundleInfo);
11222        }
11223        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11224                it != mStreamInfo.end(); it++) {
11225            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11226            channel->setBundleInfo(bundleInfo);
11227        }
11228        if (mRawDumpChannel) {
11229            mRawDumpChannel->setBundleInfo(bundleInfo);
11230        }
11231    }
11232
11233    return rc;
11234}
11235
11236/*===========================================================================
11237 * FUNCTION   : get_num_overall_buffers
11238 *
11239 * DESCRIPTION: Estimate number of pending buffers across all requests.
11240 *
11241 * PARAMETERS : None
11242 *
11243 * RETURN     : Number of overall pending buffers
11244 *
11245 *==========================================================================*/
11246uint32_t PendingBuffersMap::get_num_overall_buffers()
11247{
11248    uint32_t sum_buffers = 0;
11249    for (auto &req : mPendingBuffersInRequest) {
11250        sum_buffers += req.mPendingBufferList.size();
11251    }
11252    return sum_buffers;
11253}
11254
11255/*===========================================================================
11256 * FUNCTION   : removeBuf
11257 *
11258 * DESCRIPTION: Remove a matching buffer from tracker.
11259 *
11260 * PARAMETERS : @buffer: image buffer for the callback
11261 *
11262 * RETURN     : None
11263 *
11264 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    // Find the first tracked entry whose handle matches and remove it; a
    // request left with no pending buffers is dropped from the map too.
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                // Both loops break out immediately after the erase, so the
                // possibly-invalidated iterators are never advanced.
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
11291
11292/*===========================================================================
11293 * FUNCTION   : getBufErrStatus
11294 *
11295 * DESCRIPTION: get buffer error status
11296 *
11297 * PARAMETERS : @buffer: buffer handle
11298 *
11299 * RETURN     : None
11300 *
11301 *==========================================================================*/
11302int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
11303{
11304    for (auto& req : mPendingBuffersInRequest) {
11305        for (auto& k : req.mPendingBufferList) {
11306            if (k.buffer == buffer)
11307                return k.bufStatus;
11308        }
11309    }
11310    return CAMERA3_BUFFER_STATUS_OK;
11311}
11312
11313/*===========================================================================
11314 * FUNCTION   : setPAAFSupport
11315 *
11316 * DESCRIPTION: Set the preview-assisted auto focus support bit in
11317 *              feature mask according to stream type and filter
11318 *              arrangement
11319 *
11320 * PARAMETERS : @feature_mask: current feature mask, which may be modified
11321 *              @stream_type: stream type
11322 *              @filter_arrangement: filter arrangement
11323 *
11324 * RETURN     : None
11325 *==========================================================================*/
11326void QCamera3HardwareInterface::setPAAFSupport(
11327        cam_feature_mask_t& feature_mask,
11328        cam_stream_type_t stream_type,
11329        cam_color_filter_arrangement_t filter_arrangement)
11330{
11331    switch (filter_arrangement) {
11332    case CAM_FILTER_ARRANGEMENT_RGGB:
11333    case CAM_FILTER_ARRANGEMENT_GRBG:
11334    case CAM_FILTER_ARRANGEMENT_GBRG:
11335    case CAM_FILTER_ARRANGEMENT_BGGR:
11336        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11337                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11338            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11339        }
11340        break;
11341    case CAM_FILTER_ARRANGEMENT_Y:
11342        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11343            feature_mask |= CAM_QCOM_FEATURE_PAAF;
11344        }
11345        break;
11346    default:
11347        break;
11348    }
11349    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11350            feature_mask, stream_type, filter_arrangement);
11351
11352
11353}
11354
11355/*===========================================================================
11356 * FUNCTION   : adjustBlackLevelForCFA
11357 *
11358 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
11359 *              of bayer CFA (Color Filter Array).
11360 *
11361 * PARAMETERS : @input: black level pattern in the order of RGGB
11362 *              @output: black level pattern in the order of CFA
11363 *              @color_arrangement: CFA color arrangement
11364 *
11365 * RETURN     : None
11366 *==========================================================================*/
11367template<typename T>
11368void QCamera3HardwareInterface::adjustBlackLevelForCFA(
11369        T input[BLACK_LEVEL_PATTERN_CNT],
11370        T output[BLACK_LEVEL_PATTERN_CNT],
11371        cam_color_filter_arrangement_t color_arrangement)
11372{
11373    switch (color_arrangement) {
11374    case CAM_FILTER_ARRANGEMENT_GRBG:
11375        output[0] = input[1];
11376        output[1] = input[0];
11377        output[2] = input[3];
11378        output[3] = input[2];
11379        break;
11380    case CAM_FILTER_ARRANGEMENT_GBRG:
11381        output[0] = input[2];
11382        output[1] = input[3];
11383        output[2] = input[0];
11384        output[3] = input[1];
11385        break;
11386    case CAM_FILTER_ARRANGEMENT_BGGR:
11387        output[0] = input[3];
11388        output[1] = input[2];
11389        output[2] = input[1];
11390        output[3] = input[0];
11391        break;
11392    case CAM_FILTER_ARRANGEMENT_RGGB:
11393        output[0] = input[0];
11394        output[1] = input[1];
11395        output[2] = input[2];
11396        output[3] = input[3];
11397        break;
11398    default:
11399        LOGE("Invalid color arrangement to derive dynamic blacklevel");
11400        break;
11401    }
11402}
11403
11404/*===========================================================================
11405 * FUNCTION   : is60HzZone
11406 *
11407 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
11408 *
11409 * PARAMETERS : None
11410 *
11411 * RETURN     : True if in 60Hz zone, False otherwise
11412 *==========================================================================*/
11413bool QCamera3HardwareInterface::is60HzZone()
11414{
11415    time_t t = time(NULL);
11416    struct tm lt;
11417
11418    struct tm* r = localtime_r(&t, &lt);
11419
11420    if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
11421        return true;
11422    else
11423        return false;
11424}
11425}; //end namespace qcamera
11426