QCamera3HWI.cpp revision d478123b26a053db6e93b2f8e692b2f6a83f421c
1/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include <sync/sync.h>
44#include "gralloc_priv.h"
45
46// Display dependencies
47#include "qdMetaData.h"
48
49// Camera dependencies
50#include "android/QCamera3External.h"
51#include "util/QCameraFlash.h"
52#include "QCamera3HWI.h"
53#include "QCamera3VendorTags.h"
54#include "QCameraTrace.h"
55
56extern "C" {
57#include "mm_camera_dbg.h"
58}
59
60using namespace android;
61
62namespace qcamera {
63
64#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
65
66#define EMPTY_PIPELINE_DELAY 2
67#define PARTIAL_RESULT_COUNT 2
68#define FRAME_SKIP_DELAY     0
69
70#define MAX_VALUE_8BIT ((1<<8)-1)
71#define MAX_VALUE_10BIT ((1<<10)-1)
72#define MAX_VALUE_12BIT ((1<<12)-1)
73
74#define VIDEO_4K_WIDTH  3840
75#define VIDEO_4K_HEIGHT 2160
76
77#define MAX_EIS_WIDTH 3840
78#define MAX_EIS_HEIGHT 2160
79
80#define MAX_RAW_STREAMS        1
81#define MAX_STALLING_STREAMS   1
82#define MAX_PROCESSED_STREAMS  3
83/* Batch mode is enabled only if FPS set is equal to or greater than this */
84#define MIN_FPS_FOR_BATCH_MODE (120)
85#define PREVIEW_FPS_FOR_HFR    (30)
86#define DEFAULT_VIDEO_FPS      (30.0)
87#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
88#define MAX_HFR_BATCH_SIZE     (8)
89#define REGIONS_TUPLE_COUNT    5
90#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
91#define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds
92// Set a threshold for detection of missing buffers //seconds
93#define MISSING_REQUEST_BUF_TIMEOUT 3
94#define FLUSH_TIMEOUT 3
95#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
96
97#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
98                                              CAM_QCOM_FEATURE_CROP |\
99                                              CAM_QCOM_FEATURE_ROTATION |\
100                                              CAM_QCOM_FEATURE_SHARPNESS |\
101                                              CAM_QCOM_FEATURE_SCALE |\
102                                              CAM_QCOM_FEATURE_CAC |\
103                                              CAM_QCOM_FEATURE_CDS )
104/* Per configuration size for static metadata length*/
105#define PER_CONFIGURATION_SIZE_3 (3)
106
107#define TIMEOUT_NEVER -1
108
// Per-sensor capability table; this file reads gCamCapability[cameraId]
// in the constructor below (filled elsewhere — presumably during HAL probe).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Per-sensor cached static metadata (populated outside this chunk).
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock guarding session bookkeeping (defined elsewhere).
extern pthread_mutex_t gCamLock;
// HAL3 log verbosity; see getLogLevel() call in the constructor.
volatile uint32_t gCamHal3LogLevel = 1;
// Number of currently open camera sessions (defined elsewhere).
extern uint8_t gNumCameraSessions;
114
// Maps CDS setprop string values ("On"/"Off"/"Auto") to HAL CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
120
// Android control effect mode <-> HAL effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
134
// Android AWB mode <-> HAL white-balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
148
// Android scene mode <-> HAL scene mode translation table.
// Note: ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO is represented by the HAL's
// ANTISHAKE scene — the only entry where the names do not match 1:1.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
169
// Android AF mode <-> HAL focus mode translation table.
// ANDROID_CONTROL_AF_MODE_OFF appears twice: both CAM_FOCUS_MODE_OFF and
// CAM_FOCUS_MODE_FIXED are reported to the framework as AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
181
// Android color-correction aberration mode <-> HAL CAC mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
192
// Android AE antibanding mode <-> HAL antibanding mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
201
// Android AE mode -> HAL flash mode translation table.
// Both plain OFF and ON (no-flash AE) map to flash OFF; the REDEYE variant
// shares the AUTO flash mode with ON_AUTO_FLASH.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
211
// Android flash mode <-> HAL flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
219
// Android face detect mode <-> HAL face detect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
227
// Android focus-distance calibration <-> HAL focus calibration translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
238
// Android lens state <-> HAL AF lens state translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
245
// Supported JPEG thumbnail dimensions, stored as a flat list of
// (width, height) pairs. The leading (0, 0) entry corresponds to
// "no thumbnail" per the ANDROID_JPEG_THUMBNAIL_SIZE contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
254
// Android sensor test pattern mode <-> HAL test pattern translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
265
/* Not every Android illuminant enum has a HAL counterpart, so some Android
 * values are intentionally absent from this table.
 * The order of entries matters: when mapping from HAL to Android, lookup
 * traverses from lower to higher index, so for HAL values that map to several
 * Android values the first matching entry wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
291
// Requested video frame rate (fps) -> HAL high-frame-rate (HFR) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
303
// camera3_device_ops_t vtable handed to the camera framework via
// mCameraDevice.ops. Entries set to NULL are not implemented by this HAL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
315
// Session ids used when linking dual cameras (filled by get_session_id() in
// openCamera()); initialised to a recognizable sentinel until then.
// NOTE(review): the initializer hardcodes 3 entries — verify this matches the
// array bound declared in the header (presumably MM_CAMERA_MAX_NUM_SENSORS).
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
318
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Sets every member to
 *              a safe default, fills the hw_device_t contract handed to the
 *              framework, initializes sync primitives and reads the setprop
 *              toggles that control raw dump and TNR.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks provided by the camera service
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Fill the hw_device_t / camera3_device contract the framework will use.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Sync primitives used by the request/buffer tracking machinery.
    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         // Safe to unload: the alignment value was captured above, and the
         // dangling function pointer is never used past this point.
         dlclose(lib_surface_utils);
    }
}
442
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown happens in
 *              strict phases: stop all channels first, then delete them, then
 *              unconfigure and deinit parameters, delete the channel handle,
 *              close the camera, drain all pending bookkeeping and finally
 *              destroy sync primitives. The ordering is load-bearing.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Phase 2: every channel is stopped, now it is safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is not deleted here — presumably owned via mStreamInfo
    // above; only the alias is cleared. TODO(review): confirm ownership.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                // Best-effort: log and continue tearing down regardless.
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drain pending request/buffer bookkeeping; erasePendingRequest frees
    // any heap memory each pending entry still owns.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    // Sync primitives go last, after everything that might have used them.
    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
574
575/*===========================================================================
576 * FUNCTION   : erasePendingRequest
577 *
578 * DESCRIPTION: function to erase a desired pending request after freeing any
579 *              allocated memory
580 *
581 * PARAMETERS :
582 *   @i       : iterator pointing to pending request to be erased
583 *
584 * RETURN     : iterator pointing to the next request
585 *==========================================================================*/
586QCamera3HardwareInterface::pendingRequestIterator
587        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
588{
589    if (i->input_buffer != NULL) {
590        free(i->input_buffer);
591        i->input_buffer = NULL;
592    }
593    if (i->settings != NULL)
594        free_camera_metadata((camera_metadata_t*)i->settings);
595    return mPendingRequestsList.erase(i);
596}
597
598/*===========================================================================
599 * FUNCTION   : camEvtHandle
600 *
601 * DESCRIPTION: Function registered to mm-camera-interface to handle events
602 *
603 * PARAMETERS :
604 *   @camera_handle : interface layer camera handle
605 *   @evt           : ptr to event
606 *   @user_data     : user data ptr
607 *
608 * RETURN     : none
609 *==========================================================================*/
610void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
611                                          mm_camera_event_t *evt,
612                                          void *user_data)
613{
614    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
615    if (obj && evt) {
616        switch(evt->server_event_type) {
617            case CAM_EVENT_TYPE_DAEMON_DIED:
618                pthread_mutex_lock(&obj->mMutex);
619                obj->mState = ERROR;
620                pthread_mutex_unlock(&obj->mMutex);
621                LOGE("Fatal, camera daemon died");
622                break;
623
624            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
625                LOGD("HAL got request pull from Daemon");
626                pthread_mutex_lock(&obj->mMutex);
627                obj->mWokenUpByDaemon = true;
628                obj->unblockRequestIfNecessary();
629                pthread_mutex_unlock(&obj->mMutex);
630                break;
631
632            default:
633                LOGW("Warning: Unhandled event %d",
634                        evt->server_event_type);
635                break;
636        }
637    } else {
638        LOGE("NULL user_data/evt");
639    }
640}
641
642/*===========================================================================
643 * FUNCTION   : openCamera
644 *
645 * DESCRIPTION: open camera
646 *
647 * PARAMETERS :
648 *   @hw_device  : double ptr for camera device struct
649 *
650 * RETURN     : int32_t type of status
651 *              NO_ERROR  -- success
652 *              none-zero failure code
653 *==========================================================================*/
654int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
655{
656    int rc = 0;
657    if (mState != CLOSED) {
658        *hw_device = NULL;
659        return PERMISSION_DENIED;
660    }
661
662    m_perfLock.lock_acq();
663    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
664             mCameraId);
665
666    rc = openCamera();
667    if (rc == 0) {
668        *hw_device = &mCameraDevice.common;
669    } else
670        *hw_device = NULL;
671
672    m_perfLock.lock_rel();
673    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
674             mCameraId, rc);
675
676    if (rc == NO_ERROR) {
677        mState = OPENED;
678    }
679    return rc;
680}
681
682/*===========================================================================
683 * FUNCTION   : openCamera
684 *
685 * DESCRIPTION: open camera
686 *
687 * PARAMETERS : none
688 *
689 * RETURN     : int32_t type of status
690 *              NO_ERROR  -- success
691 *              none-zero failure code
692 *==========================================================================*/
693int QCamera3HardwareInterface::openCamera()
694{
695    int rc = 0;
696    char value[PROPERTY_VALUE_MAX];
697
698    KPI_ATRACE_CALL();
699    if (mCameraHandle) {
700        LOGE("Failure: Camera already opened");
701        return ALREADY_EXISTS;
702    }
703
704    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
705    if (rc < 0) {
706        LOGE("Failed to reserve flash for camera id: %d",
707                mCameraId);
708        return UNKNOWN_ERROR;
709    }
710
711    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
712    if (rc) {
713        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
714        return rc;
715    }
716
717    if (!mCameraHandle) {
718        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
719        return -ENODEV;
720    }
721
722    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
723            camEvtHandle, (void *)this);
724
725    if (rc < 0) {
726        LOGE("Error, failed to register event callback");
727        /* Not closing camera here since it is already handled in destructor */
728        return FAILED_TRANSACTION;
729    }
730
731    mExifParams.debug_params =
732            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
733    if (mExifParams.debug_params) {
734        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
735    } else {
736        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
737        return NO_MEMORY;
738    }
739    mFirstConfiguration = true;
740
741    //Notify display HAL that a camera session is active.
742    //But avoid calling the same during bootup because camera service might open/close
743    //cameras at boot time during its initialization and display service will also internally
744    //wait for camera service to initialize first while calling this display API, resulting in a
745    //deadlock situation. Since boot time camera open/close calls are made only to fetch
746    //capabilities, no need of this display bw optimization.
747    //Use "service.bootanim.exit" property to know boot status.
748    property_get("service.bootanim.exit", value, "0");
749    if (atoi(value) == 1) {
750        pthread_mutex_lock(&gCamLock);
751        if (gNumCameraSessions++ == 0) {
752            setCameraLaunchStatus(true);
753        }
754        pthread_mutex_unlock(&gCamLock);
755    }
756
757    //fill the session id needed while linking dual cam
758    pthread_mutex_lock(&gCamLock);
759    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
760        &sessionId[mCameraId]);
761    pthread_mutex_unlock(&gCamLock);
762
763    if (rc < 0) {
764        LOGE("Error, failed to get sessiion id");
765        return UNKNOWN_ERROR;
766    } else {
767        //Allocate related cam sync buffer
768        //this is needed for the payload that goes along with bundling cmd for related
769        //camera use cases
770        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
771        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
772        if(rc != OK) {
773            rc = NO_MEMORY;
774            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
775            return NO_MEMORY;
776        }
777
778        //Map memory for related cam sync buffer
779        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
780                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
781                m_pRelCamSyncHeap->getFd(0),
782                sizeof(cam_sync_related_sensors_event_info_t),
783                m_pRelCamSyncHeap->getPtr(0));
784        if(rc < 0) {
785            LOGE("Dualcam: failed to map Related cam sync buffer");
786            rc = FAILED_TRANSACTION;
787            return NO_MEMORY;
788        }
789        m_pRelCamSyncBuf =
790                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
791    }
792
793    LOGH("mCameraId=%d",mCameraId);
794
795    return NO_ERROR;
796}
797
798/*===========================================================================
799 * FUNCTION   : closeCamera
800 *
801 * DESCRIPTION: close camera
802 *
803 * PARAMETERS : none
804 *
805 * RETURN     : int32_t type of status
806 *              NO_ERROR  -- success
807 *              non-zero failure code
808 *==========================================================================*/
809int QCamera3HardwareInterface::closeCamera()
810{
811    KPI_ATRACE_CALL();
812    int rc = NO_ERROR;
813    char value[PROPERTY_VALUE_MAX];
814
815    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
816             mCameraId);
817    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
818    mCameraHandle = NULL;
819
820    //reset session id to some invalid id
821    pthread_mutex_lock(&gCamLock);
822    sessionId[mCameraId] = 0xDEADBEEF;
823    pthread_mutex_unlock(&gCamLock);
824
825    //Notify display HAL that there is no active camera session
826    //but avoid calling the same during bootup. Refer to openCamera
827    //for more details.
828    property_get("service.bootanim.exit", value, "0");
829    if (atoi(value) == 1) {
830        pthread_mutex_lock(&gCamLock);
831        if (--gNumCameraSessions == 0) {
832            setCameraLaunchStatus(false);
833        }
834        pthread_mutex_unlock(&gCamLock);
835    }
836
837    if (NULL != m_pRelCamSyncHeap) {
838        m_pRelCamSyncHeap->deallocate();
839        delete m_pRelCamSyncHeap;
840        m_pRelCamSyncHeap = NULL;
841        m_pRelCamSyncBuf = NULL;
842    }
843
844    if (mExifParams.debug_params) {
845        free(mExifParams.debug_params);
846        mExifParams.debug_params = NULL;
847    }
848    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
849        LOGW("Failed to release flash for camera id: %d",
850                mCameraId);
851    }
852    mState = CLOSED;
853    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
854         mCameraId, rc);
855    return rc;
856}
857
858/*===========================================================================
859 * FUNCTION   : initialize
860 *
861 * DESCRIPTION: Initialize frameworks callback functions
862 *
863 * PARAMETERS :
864 *   @callback_ops : callback function to frameworks
865 *
866 * RETURN     :
867 *
868 *==========================================================================*/
869int QCamera3HardwareInterface::initialize(
870        const struct camera3_callback_ops *callback_ops)
871{
872    ATRACE_CALL();
873    int rc;
874
875    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
876    pthread_mutex_lock(&mMutex);
877
878    // Validate current state
879    switch (mState) {
880        case OPENED:
881            /* valid state */
882            break;
883        default:
884            LOGE("Invalid state %d", mState);
885            rc = -ENODEV;
886            goto err1;
887    }
888
889    rc = initParameters();
890    if (rc < 0) {
891        LOGE("initParamters failed %d", rc);
892        goto err1;
893    }
894    mCallbackOps = callback_ops;
895
896    mChannelHandle = mCameraHandle->ops->add_channel(
897            mCameraHandle->camera_handle, NULL, NULL, this);
898    if (mChannelHandle == 0) {
899        LOGE("add_channel failed");
900        rc = -ENOMEM;
901        pthread_mutex_unlock(&mMutex);
902        return rc;
903    }
904
905    pthread_mutex_unlock(&mMutex);
906    mCameraInitialized = true;
907    mState = INITIALIZED;
908    LOGI("X");
909    return 0;
910
911err1:
912    pthread_mutex_unlock(&mMutex);
913    return rc;
914}
915
916/*===========================================================================
917 * FUNCTION   : validateStreamDimensions
918 *
919 * DESCRIPTION: Check if the configuration requested are those advertised
920 *
921 * PARAMETERS :
922 *   @stream_list : streams to be configured
923 *
924 * RETURN     :
925 *
926 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists.
    * At most one input stream is allowed per configuration.
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        /* For 90/270-degree rotation the buffer dimensions are transposed
         * relative to the sensor, so swap width/height before comparing
         * against the (sensor-oriented) capability tables. */
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            /* RAW streams must match one of the advertised raw dimensions */
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            /* ZSL / reprocess-capable streams may additionally match the full
             * active array size; the break below exits the switch directly. */
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            /* Non-ZSL processed streams fall back to the picture sizes table */
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1038
1039/*==============================================================================
1040 * FUNCTION   : isSupportChannelNeeded
1041 *
1042 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1043 *
1044 * PARAMETERS :
1045 *   @stream_list : streams to be configured
1046 *   @stream_config_info : the config info for streams to be configured
1047 *
1048 * RETURN     : Boolean true/false decision
1049 *
1050 *==========================================================================*/
1051bool QCamera3HardwareInterface::isSupportChannelNeeded(
1052        camera3_stream_configuration_t *streamList,
1053        cam_stream_size_info_t stream_config_info)
1054{
1055    uint32_t i;
1056    bool pprocRequested = false;
1057    /* Check for conditions where PProc pipeline does not have any streams*/
1058    for (i = 0; i < stream_config_info.num_streams; i++) {
1059        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1060                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1061            pprocRequested = true;
1062            break;
1063        }
1064    }
1065
1066    if (pprocRequested == false )
1067        return true;
1068
1069    /* Dummy stream needed if only raw or jpeg streams present */
1070    for (i = 0; i < streamList->num_streams; i++) {
1071        switch(streamList->streams[i]->format) {
1072            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1073            case HAL_PIXEL_FORMAT_RAW10:
1074            case HAL_PIXEL_FORMAT_RAW16:
1075            case HAL_PIXEL_FORMAT_BLOB:
1076                break;
1077            default:
1078                return false;
1079        }
1080    }
1081    return true;
1082}
1083
1084/*==============================================================================
1085 * FUNCTION   : getSensorOutputSize
1086 *
1087 * DESCRIPTION: Get sensor output size based on current stream configuration
1088 *
1089 * PARAMETERS :
1090 *   @sensor_dim : sensor output dimension (output)
1091 *
1092 * RETURN     : int32_t type of status
1093 *              NO_ERROR  -- success
1094 *              non-zero failure code
1095 *
1096 *==========================================================================*/
1097int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1098{
1099    int32_t rc = NO_ERROR;
1100
1101    cam_dimension_t max_dim = {0, 0};
1102    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1103        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1104            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1105        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1106            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1107    }
1108
1109    clear_metadata_buffer(mParameters);
1110
1111    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1112            max_dim);
1113    if (rc != NO_ERROR) {
1114        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1115        return rc;
1116    }
1117
1118    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1119    if (rc != NO_ERROR) {
1120        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1121        return rc;
1122    }
1123
1124    clear_metadata_buffer(mParameters);
1125    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1126
1127    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1128            mParameters);
1129    if (rc != NO_ERROR) {
1130        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1131        return rc;
1132    }
1133
1134    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1135    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1136
1137    return rc;
1138}
1139
1140/*==============================================================================
1141 * FUNCTION   : enablePowerHint
1142 *
1143 * DESCRIPTION: enable single powerhint for preview and different video modes.
1144 *
1145 * PARAMETERS :
1146 *
1147 * RETURN     : NULL
1148 *
1149 *==========================================================================*/
1150void QCamera3HardwareInterface::enablePowerHint()
1151{
1152    if (!mPowerHintEnabled) {
1153        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1154        mPowerHintEnabled = true;
1155    }
1156}
1157
1158/*==============================================================================
1159 * FUNCTION   : disablePowerHint
1160 *
1161 * DESCRIPTION: disable current powerhint.
1162 *
1163 * PARAMETERS :
1164 *
1165 * RETURN     : NULL
1166 *
1167 *==========================================================================*/
1168void QCamera3HardwareInterface::disablePowerHint()
1169{
1170    if (mPowerHintEnabled) {
1171        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1172        mPowerHintEnabled = false;
1173    }
1174}
1175
1176/*==============================================================================
1177 * FUNCTION   : addToPPFeatureMask
1178 *
1179 * DESCRIPTION: add additional features to pp feature mask based on
1180 *              stream type and usecase
1181 *
1182 * PARAMETERS :
1183 *   @stream_format : stream type for feature mask
1184 *   @stream_idx : stream idx within postprocess_mask list to change
1185 *
1186 * RETURN     : NULL
1187 *
1188 *==========================================================================*/
1189void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1190        uint32_t stream_idx)
1191{
1192    char feature_mask_value[PROPERTY_VALUE_MAX];
1193    cam_feature_mask_t feature_mask;
1194    int args_converted;
1195    int property_len;
1196
1197    /* Get feature mask from property */
1198    property_len = property_get("persist.camera.hal3.feature",
1199            feature_mask_value, "0");
1200    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1201            (feature_mask_value[1] == 'x')) {
1202        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1203    } else {
1204        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1205    }
1206    if (1 != args_converted) {
1207        feature_mask = 0;
1208        LOGE("Wrong feature mask %s", feature_mask_value);
1209        return;
1210    }
1211
1212    switch (stream_format) {
1213    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1214        /* Add LLVD to pp feature mask only if video hint is enabled */
1215        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1216            mStreamConfigInfo.postprocess_mask[stream_idx]
1217                    |= CAM_QTI_FEATURE_SW_TNR;
1218            LOGH("Added SW TNR to pp feature mask");
1219        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1220            mStreamConfigInfo.postprocess_mask[stream_idx]
1221                    |= CAM_QCOM_FEATURE_LLVD;
1222            LOGH("Added LLVD SeeMore to pp feature mask");
1223        }
1224        break;
1225    }
1226    default:
1227        break;
1228    }
1229    LOGD("PP feature mask %llx",
1230            mStreamConfigInfo.postprocess_mask[stream_idx]);
1231}
1232
1233/*==============================================================================
1234 * FUNCTION   : updateFpsInPreviewBuffer
1235 *
1236 * DESCRIPTION: update FPS information in preview buffer.
1237 *
1238 * PARAMETERS :
1239 *   @metadata    : pointer to metadata buffer
1240 *   @frame_number: frame_number to look for in pending buffer list
1241 *
1242 * RETURN     : None
1243 *
1244 *==========================================================================*/
1245void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1246        uint32_t frame_number)
1247{
1248    // Mark all pending buffers for this particular request
1249    // with corresponding framerate information
1250    for (List<PendingBuffersInRequest>::iterator req =
1251            mPendingBuffersMap.mPendingBuffersInRequest.begin();
1252            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1253        for(List<PendingBufferInfo>::iterator j =
1254                req->mPendingBufferList.begin();
1255                j != req->mPendingBufferList.end(); j++) {
1256            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1257            if ((req->frame_number == frame_number) &&
1258                (channel->getStreamTypeMask() &
1259                (1U << CAM_STREAM_TYPE_PREVIEW))) {
1260                IF_META_AVAILABLE(cam_fps_range_t, float_range,
1261                    CAM_INTF_PARM_FPS_RANGE, metadata) {
1262                    int32_t cameraFps = float_range->max_fps;
1263                    struct private_handle_t *priv_handle =
1264                        (struct private_handle_t *)(*(j->buffer));
1265                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1266                }
1267            }
1268        }
1269    }
1270}
1271
1272/*===========================================================================
1273 * FUNCTION   : configureStreams
1274 *
1275 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1276 *              and output streams.
1277 *
1278 * PARAMETERS :
1279 *   @stream_list : streams to be configured
1280 *
1281 * RETURN     :
1282 *
1283 *==========================================================================*/
1284int QCamera3HardwareInterface::configureStreams(
1285        camera3_stream_configuration_t *streamList)
1286{
1287    ATRACE_CALL();
1288    int rc = 0;
1289
1290    // Acquire perfLock before configure streams
1291    m_perfLock.lock_acq();
1292    rc = configureStreamsPerfLocked(streamList);
1293    m_perfLock.lock_rel();
1294
1295    return rc;
1296}
1297
1298/*===========================================================================
1299 * FUNCTION   : configureStreamsPerfLocked
1300 *
1301 * DESCRIPTION: configureStreams while perfLock is held.
1302 *
1303 * PARAMETERS :
1304 *   @stream_list : streams to be configured
1305 *
1306 * RETURN     : int32_t type of status
1307 *              NO_ERROR  -- success
1308 *              non-zero failure code
1309 *==========================================================================*/
1310int QCamera3HardwareInterface::configureStreamsPerfLocked(
1311        camera3_stream_configuration_t *streamList)
1312{
1313    ATRACE_CALL();
1314    int rc = 0;
1315
1316    // Sanity check stream_list
1317    if (streamList == NULL) {
1318        LOGE("NULL stream configuration");
1319        return BAD_VALUE;
1320    }
1321    if (streamList->streams == NULL) {
1322        LOGE("NULL stream list");
1323        return BAD_VALUE;
1324    }
1325
1326    if (streamList->num_streams < 1) {
1327        LOGE("Bad number of streams requested: %d",
1328                streamList->num_streams);
1329        return BAD_VALUE;
1330    }
1331
1332    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1333        LOGE("Maximum number of streams %d exceeded: %d",
1334                MAX_NUM_STREAMS, streamList->num_streams);
1335        return BAD_VALUE;
1336    }
1337
1338    mOpMode = streamList->operation_mode;
1339    LOGD("mOpMode: %d", mOpMode);
1340
1341    /* first invalidate all the steams in the mStreamList
1342     * if they appear again, they will be validated */
1343    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1344            it != mStreamInfo.end(); it++) {
1345        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1346        if (channel) {
1347          channel->stop();
1348        }
1349        (*it)->status = INVALID;
1350    }
1351
1352    if (mRawDumpChannel) {
1353        mRawDumpChannel->stop();
1354        delete mRawDumpChannel;
1355        mRawDumpChannel = NULL;
1356    }
1357
1358    if (mSupportChannel)
1359        mSupportChannel->stop();
1360
1361    if (mAnalysisChannel) {
1362        mAnalysisChannel->stop();
1363    }
1364    if (mMetadataChannel) {
1365        /* If content of mStreamInfo is not 0, there is metadata stream */
1366        mMetadataChannel->stop();
1367    }
1368    if (mChannelHandle) {
1369        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1370                mChannelHandle);
1371        LOGD("stopping channel %d", mChannelHandle);
1372    }
1373
1374    pthread_mutex_lock(&mMutex);
1375
1376    // Check state
1377    switch (mState) {
1378        case INITIALIZED:
1379        case CONFIGURED:
1380        case STARTED:
1381            /* valid state */
1382            break;
1383        default:
1384            LOGE("Invalid state %d", mState);
1385            pthread_mutex_unlock(&mMutex);
1386            return -ENODEV;
1387    }
1388
1389    /* Check whether we have video stream */
1390    m_bIs4KVideo = false;
1391    m_bIsVideo = false;
1392    m_bEisSupportedSize = false;
1393    m_bTnrEnabled = false;
1394    bool isZsl = false;
1395    uint32_t videoWidth = 0U;
1396    uint32_t videoHeight = 0U;
1397    size_t rawStreamCnt = 0;
1398    size_t stallStreamCnt = 0;
1399    size_t processedStreamCnt = 0;
1400    // Number of streams on ISP encoder path
1401    size_t numStreamsOnEncoder = 0;
1402    size_t numYuv888OnEncoder = 0;
1403    bool bYuv888OverrideJpeg = false;
1404    cam_dimension_t largeYuv888Size = {0, 0};
1405    cam_dimension_t maxViewfinderSize = {0, 0};
1406    bool bJpegExceeds4K = false;
1407    bool bJpegOnEncoder = false;
1408    bool bUseCommonFeatureMask = false;
1409    cam_feature_mask_t commonFeatureMask = 0;
1410    bool bSmallJpegSize = false;
1411    uint32_t width_ratio;
1412    uint32_t height_ratio;
1413    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1414    camera3_stream_t *inputStream = NULL;
1415    bool isJpeg = false;
1416    cam_dimension_t jpegSize = {0, 0};
1417
1418    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1419
1420    /*EIS configuration*/
1421    bool eisSupported = false;
1422    bool oisSupported = false;
1423    int32_t margin_index = -1;
1424    uint8_t eis_prop_set;
1425    uint32_t maxEisWidth = 0;
1426    uint32_t maxEisHeight = 0;
1427
1428    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1429
1430    size_t count = IS_TYPE_MAX;
1431    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1432    for (size_t i = 0; i < count; i++) {
1433        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1434            eisSupported = true;
1435            margin_index = (int32_t)i;
1436            break;
1437        }
1438    }
1439
1440    count = CAM_OPT_STAB_MAX;
1441    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1442    for (size_t i = 0; i < count; i++) {
1443        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1444            oisSupported = true;
1445            break;
1446        }
1447    }
1448
1449    if (eisSupported) {
1450        maxEisWidth = MAX_EIS_WIDTH;
1451        maxEisHeight = MAX_EIS_HEIGHT;
1452    }
1453
1454    /* EIS setprop control */
1455    char eis_prop[PROPERTY_VALUE_MAX];
1456    memset(eis_prop, 0, sizeof(eis_prop));
1457    property_get("persist.camera.eis.enable", eis_prop, "0");
1458    eis_prop_set = (uint8_t)atoi(eis_prop);
1459
1460    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1461            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1462
1463    /* stream configurations */
1464    for (size_t i = 0; i < streamList->num_streams; i++) {
1465        camera3_stream_t *newStream = streamList->streams[i];
1466        LOGI("stream[%d] type = %d, format = %d, width = %d, "
1467                "height = %d, rotation = %d, usage = 0x%x",
1468                 i, newStream->stream_type, newStream->format,
1469                newStream->width, newStream->height, newStream->rotation,
1470                newStream->usage);
1471        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1472                newStream->stream_type == CAMERA3_STREAM_INPUT){
1473            isZsl = true;
1474        }
1475        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1476            inputStream = newStream;
1477        }
1478
1479        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1480            isJpeg = true;
1481            jpegSize.width = newStream->width;
1482            jpegSize.height = newStream->height;
1483            if (newStream->width > VIDEO_4K_WIDTH ||
1484                    newStream->height > VIDEO_4K_HEIGHT)
1485                bJpegExceeds4K = true;
1486        }
1487
1488        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1489                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1490            m_bIsVideo = true;
1491            videoWidth = newStream->width;
1492            videoHeight = newStream->height;
1493            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1494                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1495                m_bIs4KVideo = true;
1496            }
1497            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1498                                  (newStream->height <= maxEisHeight);
1499        }
1500        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1501                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1502            switch (newStream->format) {
1503            case HAL_PIXEL_FORMAT_BLOB:
1504                stallStreamCnt++;
1505                if (isOnEncoder(maxViewfinderSize, newStream->width,
1506                        newStream->height)) {
1507                    numStreamsOnEncoder++;
1508                    bJpegOnEncoder = true;
1509                }
1510                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1511                        newStream->width);
1512                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1513                        newStream->height);;
1514                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1515                        "FATAL: max_downscale_factor cannot be zero and so assert");
1516                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1517                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1518                    LOGH("Setting small jpeg size flag to true");
1519                    bSmallJpegSize = true;
1520                }
1521                break;
1522            case HAL_PIXEL_FORMAT_RAW10:
1523            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1524            case HAL_PIXEL_FORMAT_RAW16:
1525                rawStreamCnt++;
1526                break;
1527            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1528                processedStreamCnt++;
1529                if (isOnEncoder(maxViewfinderSize, newStream->width,
1530                        newStream->height)) {
1531                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1532                            !IS_USAGE_ZSL(newStream->usage)) {
1533                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1534                    }
1535                    numStreamsOnEncoder++;
1536                }
1537                break;
1538            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1539                processedStreamCnt++;
1540                if (isOnEncoder(maxViewfinderSize, newStream->width,
1541                        newStream->height)) {
1542                    // If Yuv888 size is not greater than 4K, set feature mask
1543                    // to SUPERSET so that it support concurrent request on
1544                    // YUV and JPEG.
1545                    if (newStream->width <= VIDEO_4K_WIDTH &&
1546                            newStream->height <= VIDEO_4K_HEIGHT) {
1547                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1548                    }
1549                    numStreamsOnEncoder++;
1550                    numYuv888OnEncoder++;
1551                    largeYuv888Size.width = newStream->width;
1552                    largeYuv888Size.height = newStream->height;
1553                }
1554                break;
1555            default:
1556                processedStreamCnt++;
1557                if (isOnEncoder(maxViewfinderSize, newStream->width,
1558                        newStream->height)) {
1559                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1560                    numStreamsOnEncoder++;
1561                }
1562                break;
1563            }
1564
1565        }
1566    }
1567
1568    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1569        !m_bIsVideo) {
1570        m_bEisEnable = false;
1571    }
1572
1573    /* Logic to enable/disable TNR based on specific config size/etc.*/
1574    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1575            ((videoWidth == 1920 && videoHeight == 1080) ||
1576            (videoWidth == 1280 && videoHeight == 720)) &&
1577            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1578        m_bTnrEnabled = true;
1579
1580    /* Check if num_streams is sane */
1581    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1582            rawStreamCnt > MAX_RAW_STREAMS ||
1583            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1584        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1585                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1586        pthread_mutex_unlock(&mMutex);
1587        return -EINVAL;
1588    }
1589    /* Check whether we have zsl stream or 4k video case */
1590    if (isZsl && m_bIsVideo) {
1591        LOGE("Currently invalid configuration ZSL&Video!");
1592        pthread_mutex_unlock(&mMutex);
1593        return -EINVAL;
1594    }
1595    /* Check if stream sizes are sane */
1596    if (numStreamsOnEncoder > 2) {
1597        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1598        pthread_mutex_unlock(&mMutex);
1599        return -EINVAL;
1600    } else if (1 < numStreamsOnEncoder){
1601        bUseCommonFeatureMask = true;
1602        LOGH("Multiple streams above max viewfinder size, common mask needed");
1603    }
1604
1605    /* Check if BLOB size is greater than 4k in 4k recording case */
1606    if (m_bIs4KVideo && bJpegExceeds4K) {
1607        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1608        pthread_mutex_unlock(&mMutex);
1609        return -EINVAL;
1610    }
1611
1612    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1613    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1614    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1615    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1616    // configurations:
1617    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1618    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1619    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1620    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1621        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1622                __func__);
1623        pthread_mutex_unlock(&mMutex);
1624        return -EINVAL;
1625    }
1626
1627    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1628    // the YUV stream's size is strictly greater than the JPEG size (the code below
1629    // uses '>', not '>='; NOTE(review): confirm whether '>=' was intended), set common
1630    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1630    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1631            jpegSize.width, jpegSize.height) &&
1632            largeYuv888Size.width > jpegSize.width &&
1633            largeYuv888Size.height > jpegSize.height) {
1634        bYuv888OverrideJpeg = true;
1635    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1636        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1637    }
1638
1639    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1640            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1641            commonFeatureMask);
1642    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1643            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1644
1645    rc = validateStreamDimensions(streamList);
1646    if (rc == NO_ERROR) {
1647        rc = validateStreamRotations(streamList);
1648    }
1649    if (rc != NO_ERROR) {
1650        LOGE("Invalid stream configuration requested!");
1651        pthread_mutex_unlock(&mMutex);
1652        return rc;
1653    }
1654
1655    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1656    camera3_stream_t *jpegStream = NULL;
1657    for (size_t i = 0; i < streamList->num_streams; i++) {
1658        camera3_stream_t *newStream = streamList->streams[i];
1659        LOGH("newStream type = %d, stream format = %d "
1660                "stream size : %d x %d, stream rotation = %d",
1661                 newStream->stream_type, newStream->format,
1662                newStream->width, newStream->height, newStream->rotation);
1663        //if the stream is in the mStreamList validate it
1664        bool stream_exists = false;
1665        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1666                it != mStreamInfo.end(); it++) {
1667            if ((*it)->stream == newStream) {
1668                QCamera3ProcessingChannel *channel =
1669                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1670                stream_exists = true;
1671                if (channel)
1672                    delete channel;
1673                (*it)->status = VALID;
1674                (*it)->stream->priv = NULL;
1675                (*it)->channel = NULL;
1676            }
1677        }
1678        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1679            //new stream
1680            stream_info_t* stream_info;
1681            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1682            if (!stream_info) {
1683               LOGE("Could not allocate stream info");
1684               rc = -ENOMEM;
1685               pthread_mutex_unlock(&mMutex);
1686               return rc;
1687            }
1688            stream_info->stream = newStream;
1689            stream_info->status = VALID;
1690            stream_info->channel = NULL;
1691            mStreamInfo.push_back(stream_info);
1692        }
1693        /* Covers Opaque ZSL and API1 F/W ZSL */
1694        if (IS_USAGE_ZSL(newStream->usage)
1695                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1696            if (zslStream != NULL) {
1697                LOGE("Multiple input/reprocess streams requested!");
1698                pthread_mutex_unlock(&mMutex);
1699                return BAD_VALUE;
1700            }
1701            zslStream = newStream;
1702        }
1703        /* Covers YUV reprocess */
1704        if (inputStream != NULL) {
1705            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1706                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1707                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1708                    && inputStream->width == newStream->width
1709                    && inputStream->height == newStream->height) {
1710                if (zslStream != NULL) {
1711                    /* This scenario indicates multiple YUV streams with same size
1712                     * as input stream have been requested, since zsl stream handle
1713                     * is solely used for the purpose of overriding the size of streams
1714                     * which share h/w streams we will just make a guess here as to
1715                     * which of the stream is a ZSL stream, this will be refactored
1716                     * once we make generic logic for streams sharing encoder output
1717                     */
1718                    LOGH("Warning, Multiple ip/reprocess streams requested!");
1719                }
1720                zslStream = newStream;
1721            }
1722        }
1723        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1724            jpegStream = newStream;
1725        }
1726    }
1727
1728    /* If a zsl stream is set, we know that we have configured at least one input or
1729       bidirectional stream */
1730    if (NULL != zslStream) {
1731        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1732        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1733        mInputStreamInfo.format = zslStream->format;
1734        mInputStreamInfo.usage = zslStream->usage;
1735        LOGD("Input stream configured! %d x %d, format %d, usage %d",
1736                 mInputStreamInfo.dim.width,
1737                mInputStreamInfo.dim.height,
1738                mInputStreamInfo.format, mInputStreamInfo.usage);
1739    }
1740
1741    cleanAndSortStreamInfo();
1742    if (mMetadataChannel) {
1743        delete mMetadataChannel;
1744        mMetadataChannel = NULL;
1745    }
1746    if (mSupportChannel) {
1747        delete mSupportChannel;
1748        mSupportChannel = NULL;
1749    }
1750
1751    if (mAnalysisChannel) {
1752        delete mAnalysisChannel;
1753        mAnalysisChannel = NULL;
1754    }
1755
1756    if (mDummyBatchChannel) {
1757        delete mDummyBatchChannel;
1758        mDummyBatchChannel = NULL;
1759    }
1760
1761    //Create metadata channel and initialize it
1762    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1763    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1764            gCamCapability[mCameraId]->color_arrangement);
1765    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1766                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1767                    &padding_info, metadataFeatureMask, this);
1768    if (mMetadataChannel == NULL) {
1769        LOGE("failed to allocate metadata channel");
1770        rc = -ENOMEM;
1771        pthread_mutex_unlock(&mMutex);
1772        return rc;
1773    }
1774    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1775    if (rc < 0) {
1776        LOGE("metadata channel initialization failed");
1777        delete mMetadataChannel;
1778        mMetadataChannel = NULL;
1779        pthread_mutex_unlock(&mMutex);
1780        return rc;
1781    }
1782
1783    // Create analysis stream all the time, even when h/w support is not available
1784    {
1785        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1786        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1787                gCamCapability[mCameraId]->color_arrangement);
1788        cam_analysis_info_t analysisInfo;
1789        rc = mCommon.getAnalysisInfo(
1790                FALSE,
1791                TRUE,
1792                analysisFeatureMask,
1793                &analysisInfo);
1794        if (rc != NO_ERROR) {
1795            LOGE("getAnalysisInfo failed, ret = %d", rc);
1796            pthread_mutex_unlock(&mMutex);
1797            return rc;
1798        }
1799
1800        mAnalysisChannel = new QCamera3SupportChannel(
1801                mCameraHandle->camera_handle,
1802                mChannelHandle,
1803                mCameraHandle->ops,
1804                &analysisInfo.analysis_padding_info,
1805                analysisFeatureMask,
1806                CAM_STREAM_TYPE_ANALYSIS,
1807                &analysisInfo.analysis_max_res,
1808                (analysisInfo.analysis_format
1809                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1810                : CAM_FORMAT_YUV_420_NV21),
1811                analysisInfo.hw_analysis_supported,
1812                this,
1813                0); // force buffer count to 0
1814        if (!mAnalysisChannel) {
1815            LOGE("H/W Analysis channel cannot be created");
1816            pthread_mutex_unlock(&mMutex);
1817            return -ENOMEM;
1818        }
1819    }
1820
1821    bool isRawStreamRequested = false;
1822    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1823    /* Allocate channel objects for the requested streams */
1824    for (size_t i = 0; i < streamList->num_streams; i++) {
1825        camera3_stream_t *newStream = streamList->streams[i];
1826        uint32_t stream_usage = newStream->usage;
1827        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1828        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1829        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1830                || IS_USAGE_ZSL(newStream->usage)) &&
1831            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1832            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1833            if (bUseCommonFeatureMask) {
1834                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1835                        commonFeatureMask;
1836            } else {
1837                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1838                        CAM_QCOM_FEATURE_NONE;
1839            }
1840
1841        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1842                LOGH("Input stream configured, reprocess config");
1843        } else {
1844            //for non zsl streams find out the format
1845            switch (newStream->format) {
1846            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1847            {
1848                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1849                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1850                /* add additional features to pp feature mask */
1851                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1852                        mStreamConfigInfo.num_streams);
1853
1854                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1855                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1856                                CAM_STREAM_TYPE_VIDEO;
1857                    if (m_bTnrEnabled && m_bTnrVideo) {
1858                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1859                            CAM_QCOM_FEATURE_CPP_TNR;
1860                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1861                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1862                                ~CAM_QCOM_FEATURE_CDS;
1863                    }
1864                } else {
1865                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1866                            CAM_STREAM_TYPE_PREVIEW;
1867                    if (m_bTnrEnabled && m_bTnrPreview) {
1868                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1869                                CAM_QCOM_FEATURE_CPP_TNR;
1870                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1871                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1872                                ~CAM_QCOM_FEATURE_CDS;
1873                    }
1874                    padding_info.width_padding = mSurfaceStridePadding;
1875                    padding_info.height_padding = CAM_PAD_TO_2;
1876                }
1877                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1878                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1879                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1880                            newStream->height;
1881                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1882                            newStream->width;
1883                }
1884            }
1885            break;
1886            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1887                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1888                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1889                    if (bUseCommonFeatureMask)
1890                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1891                                commonFeatureMask;
1892                    else
1893                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1894                                CAM_QCOM_FEATURE_NONE;
1895                } else {
1896                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1897                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1898                }
1899            break;
1900            case HAL_PIXEL_FORMAT_BLOB:
1901                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1902                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1903                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1904                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1905                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1906                } else {
1907                    if (bUseCommonFeatureMask &&
1908                            isOnEncoder(maxViewfinderSize, newStream->width,
1909                            newStream->height)) {
1910                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1911                    } else {
1912                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1913                    }
1914                }
1915                if (isZsl) {
1916                    if (zslStream) {
1917                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1918                                (int32_t)zslStream->width;
1919                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1920                                (int32_t)zslStream->height;
1921                    } else {
1922                        LOGE("Error, No ZSL stream identified");
1923                        pthread_mutex_unlock(&mMutex);
1924                        return -EINVAL;
1925                    }
1926                } else if (m_bIs4KVideo) {
1927                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1928                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1929                } else if (bYuv888OverrideJpeg) {
1930                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1931                            (int32_t)largeYuv888Size.width;
1932                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1933                            (int32_t)largeYuv888Size.height;
1934                }
1935                break;
1936            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1937            case HAL_PIXEL_FORMAT_RAW16:
1938            case HAL_PIXEL_FORMAT_RAW10:
1939                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1940                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1941                isRawStreamRequested = true;
1942                break;
1943            default:
1944                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1945                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1946                break;
1947            }
1948        }
1949
1950        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1951                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1952                gCamCapability[mCameraId]->color_arrangement);
1953
1954        if (newStream->priv == NULL) {
1955            //New stream, construct channel
1956            switch (newStream->stream_type) {
1957            case CAMERA3_STREAM_INPUT:
1958                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1959                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1960                break;
1961            case CAMERA3_STREAM_BIDIRECTIONAL:
1962                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1963                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1964                break;
1965            case CAMERA3_STREAM_OUTPUT:
1966                /* For video encoding stream, set read/write rarely
1967                 * flag so that they may be set to un-cached */
1968                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1969                    newStream->usage |=
1970                         (GRALLOC_USAGE_SW_READ_RARELY |
1971                         GRALLOC_USAGE_SW_WRITE_RARELY |
1972                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1973                else if (IS_USAGE_ZSL(newStream->usage))
1974                {
1975                    LOGD("ZSL usage flag skipping");
1976                }
1977                else if (newStream == zslStream
1978                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1979                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1980                } else
1981                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1982                break;
1983            default:
1984                LOGE("Invalid stream_type %d", newStream->stream_type);
1985                break;
1986            }
1987
1988            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1989                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1990                QCamera3ProcessingChannel *channel = NULL;
1991                switch (newStream->format) {
1992                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1993                    if ((newStream->usage &
1994                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1995                            (streamList->operation_mode ==
1996                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1997                    ) {
1998                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1999                                mChannelHandle, mCameraHandle->ops, captureResultCb,
2000                                &gCamCapability[mCameraId]->padding_info,
2001                                this,
2002                                newStream,
2003                                (cam_stream_type_t)
2004                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2005                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2006                                mMetadataChannel,
2007                                0); //heap buffers are not required for HFR video channel
2008                        if (channel == NULL) {
2009                            LOGE("allocation of channel failed");
2010                            pthread_mutex_unlock(&mMutex);
2011                            return -ENOMEM;
2012                        }
2013                        //channel->getNumBuffers() will return 0 here so use
2014                        //MAX_INFLIGHT_HFR_REQUESTS
2015                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2016                        newStream->priv = channel;
2017                        LOGI("num video buffers in HFR mode: %d",
2018                                 MAX_INFLIGHT_HFR_REQUESTS);
2019                    } else {
2020                        /* Copy stream contents in HFR preview only case to create
2021                         * dummy batch channel so that sensor streaming is in
2022                         * HFR mode */
2023                        if (!m_bIsVideo && (streamList->operation_mode ==
2024                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2025                            mDummyBatchStream = *newStream;
2026                        }
2027                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2028                                mChannelHandle, mCameraHandle->ops, captureResultCb,
2029                                &gCamCapability[mCameraId]->padding_info,
2030                                this,
2031                                newStream,
2032                                (cam_stream_type_t)
2033                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2034                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2035                                mMetadataChannel,
2036                                MAX_INFLIGHT_REQUESTS);
2037                        if (channel == NULL) {
2038                            LOGE("allocation of channel failed");
2039                            pthread_mutex_unlock(&mMutex);
2040                            return -ENOMEM;
2041                        }
2042                        newStream->max_buffers = channel->getNumBuffers();
2043                        newStream->priv = channel;
2044                    }
2045                    break;
2046                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2047                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2048                            mChannelHandle,
2049                            mCameraHandle->ops, captureResultCb,
2050                            &padding_info,
2051                            this,
2052                            newStream,
2053                            (cam_stream_type_t)
2054                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2055                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2056                            mMetadataChannel);
2057                    if (channel == NULL) {
2058                        LOGE("allocation of YUV channel failed");
2059                        pthread_mutex_unlock(&mMutex);
2060                        return -ENOMEM;
2061                    }
2062                    newStream->max_buffers = channel->getNumBuffers();
2063                    newStream->priv = channel;
2064                    break;
2065                }
2066                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2067                case HAL_PIXEL_FORMAT_RAW16:
2068                case HAL_PIXEL_FORMAT_RAW10:
2069                    mRawChannel = new QCamera3RawChannel(
2070                            mCameraHandle->camera_handle, mChannelHandle,
2071                            mCameraHandle->ops, captureResultCb,
2072                            &padding_info,
2073                            this, newStream,
2074                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2075                            mMetadataChannel,
2076                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2077                    if (mRawChannel == NULL) {
2078                        LOGE("allocation of raw channel failed");
2079                        pthread_mutex_unlock(&mMutex);
2080                        return -ENOMEM;
2081                    }
2082                    newStream->max_buffers = mRawChannel->getNumBuffers();
2083                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2084                    break;
2085                case HAL_PIXEL_FORMAT_BLOB:
2086                    // Max live snapshot inflight buffer is 1. This is to mitigate
2087                    // frame drop issues for video snapshot. The more buffers being
2088                    // allocated, the more frame drops there are.
2089                    mPictureChannel = new QCamera3PicChannel(
2090                            mCameraHandle->camera_handle, mChannelHandle,
2091                            mCameraHandle->ops, captureResultCb,
2092                            &padding_info, this, newStream,
2093                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2094                            m_bIs4KVideo, isZsl, mMetadataChannel,
2095                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2096                    if (mPictureChannel == NULL) {
2097                        LOGE("allocation of channel failed");
2098                        pthread_mutex_unlock(&mMutex);
2099                        return -ENOMEM;
2100                    }
2101                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2102                    newStream->max_buffers = mPictureChannel->getNumBuffers();
2103                    mPictureChannel->overrideYuvSize(
2104                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2105                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2106                    break;
2107
2108                default:
2109                    LOGE("not a supported format 0x%x", newStream->format);
2110                    break;
2111                }
2112            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2113                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2114            } else {
2115                LOGE("Error, Unknown stream type");
2116                pthread_mutex_unlock(&mMutex);
2117                return -EINVAL;
2118            }
2119
2120            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2121            if (channel != NULL && channel->isUBWCEnabled()) {
2122                cam_format_t fmt = channel->getStreamDefaultFormat(
2123                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2124                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2125                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2126                }
2127            }
2128
2129            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2130                    it != mStreamInfo.end(); it++) {
2131                if ((*it)->stream == newStream) {
2132                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2133                    break;
2134                }
2135            }
2136        } else {
2137            // Channel already exists for this stream
2138            // Do nothing for now
2139        }
2140        padding_info = gCamCapability[mCameraId]->padding_info;
2141
2142        /* Do not add entries for input stream in metastream info
2143         * since there is no real stream associated with it
2144         */
2145        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2146            mStreamConfigInfo.num_streams++;
2147    }
2148
2149    //RAW DUMP channel
2150    if (mEnableRawDump && isRawStreamRequested == false){
2151        cam_dimension_t rawDumpSize;
2152        rawDumpSize = getMaxRawSize(mCameraId);
2153        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2154        setPAAFSupport(rawDumpFeatureMask,
2155                CAM_STREAM_TYPE_RAW,
2156                gCamCapability[mCameraId]->color_arrangement);
2157        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2158                                  mChannelHandle,
2159                                  mCameraHandle->ops,
2160                                  rawDumpSize,
2161                                  &padding_info,
2162                                  this, rawDumpFeatureMask);
2163        if (!mRawDumpChannel) {
2164            LOGE("Raw Dump channel cannot be created");
2165            pthread_mutex_unlock(&mMutex);
2166            return -ENOMEM;
2167        }
2168    }
2169
2170
2171    if (mAnalysisChannel) {
2172        cam_analysis_info_t analysisInfo;
2173        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2174        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2175                CAM_STREAM_TYPE_ANALYSIS;
2176        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2177                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2178        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2179                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2180                gCamCapability[mCameraId]->color_arrangement);
2181        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2182                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2183                &analysisInfo);
2184        if (rc != NO_ERROR) {
2185            LOGE("getAnalysisInfo failed, ret = %d", rc);
2186            pthread_mutex_unlock(&mMutex);
2187            return rc;
2188        }
2189        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2190                analysisInfo.analysis_max_res;
2191        mStreamConfigInfo.num_streams++;
2192    }
2193
2194    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2195        cam_analysis_info_t supportInfo;
2196        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2197        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2198        setPAAFSupport(callbackFeatureMask,
2199                CAM_STREAM_TYPE_CALLBACK,
2200                gCamCapability[mCameraId]->color_arrangement);
2201        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2202        if (rc != NO_ERROR) {
2203            LOGE("getAnalysisInfo failed, ret = %d", rc);
2204            pthread_mutex_unlock(&mMutex);
2205            return rc;
2206        }
2207        mSupportChannel = new QCamera3SupportChannel(
2208                mCameraHandle->camera_handle,
2209                mChannelHandle,
2210                mCameraHandle->ops,
2211                &gCamCapability[mCameraId]->padding_info,
2212                callbackFeatureMask,
2213                CAM_STREAM_TYPE_CALLBACK,
2214                &QCamera3SupportChannel::kDim,
2215                CAM_FORMAT_YUV_420_NV21,
2216                supportInfo.hw_analysis_supported,
2217                this);
2218        if (!mSupportChannel) {
2219            LOGE("dummy channel cannot be created");
2220            pthread_mutex_unlock(&mMutex);
2221            return -ENOMEM;
2222        }
2223    }
2224
2225    if (mSupportChannel) {
2226        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2227                QCamera3SupportChannel::kDim;
2228        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2229                CAM_STREAM_TYPE_CALLBACK;
2230        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2231                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2232        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2233                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2234                gCamCapability[mCameraId]->color_arrangement);
2235        mStreamConfigInfo.num_streams++;
2236    }
2237
2238    if (mRawDumpChannel) {
2239        cam_dimension_t rawSize;
2240        rawSize = getMaxRawSize(mCameraId);
2241        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2242                rawSize;
2243        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2244                CAM_STREAM_TYPE_RAW;
2245        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2246                CAM_QCOM_FEATURE_NONE;
2247        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2248                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2249                gCamCapability[mCameraId]->color_arrangement);
2250        mStreamConfigInfo.num_streams++;
2251    }
2252    /* In HFR mode, if video stream is not added, create a dummy channel so that
2253     * ISP can create a batch mode even for preview only case. This channel is
2254     * never 'start'ed (no stream-on), it is only 'initialized'  */
2255    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2256            !m_bIsVideo) {
2257        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2258        setPAAFSupport(dummyFeatureMask,
2259                CAM_STREAM_TYPE_VIDEO,
2260                gCamCapability[mCameraId]->color_arrangement);
2261        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2262                mChannelHandle,
2263                mCameraHandle->ops, captureResultCb,
2264                &gCamCapability[mCameraId]->padding_info,
2265                this,
2266                &mDummyBatchStream,
2267                CAM_STREAM_TYPE_VIDEO,
2268                dummyFeatureMask,
2269                mMetadataChannel);
2270        if (NULL == mDummyBatchChannel) {
2271            LOGE("creation of mDummyBatchChannel failed."
2272                    "Preview will use non-hfr sensor mode ");
2273        }
2274    }
2275    if (mDummyBatchChannel) {
2276        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2277                mDummyBatchStream.width;
2278        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2279                mDummyBatchStream.height;
2280        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2281                CAM_STREAM_TYPE_VIDEO;
2282        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2283                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2284        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2285                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2286                gCamCapability[mCameraId]->color_arrangement);
2287        mStreamConfigInfo.num_streams++;
2288    }
2289
2290    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2291    mStreamConfigInfo.buffer_info.max_buffers =
2292            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2293
2294    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2295    for (pendingRequestIterator i = mPendingRequestsList.begin();
2296            i != mPendingRequestsList.end();) {
2297        i = erasePendingRequest(i);
2298    }
2299    mPendingFrameDropList.clear();
2300    // Initialize/Reset the pending buffers list
2301    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2302        req.mPendingBufferList.clear();
2303    }
2304    mPendingBuffersMap.mPendingBuffersInRequest.clear();
2305
2306    mPendingReprocessResultList.clear();
2307
2308    mCurJpegMeta.clear();
2309    //Get min frame duration for this streams configuration
2310    deriveMinFrameDuration();
2311
2312    // Update state
2313    mState = CONFIGURED;
2314
2315    pthread_mutex_unlock(&mMutex);
2316
2317    return rc;
2318}
2319
2320/*===========================================================================
2321 * FUNCTION   : validateCaptureRequest
2322 *
2323 * DESCRIPTION: validate a capture request from camera service
2324 *
2325 * PARAMETERS :
2326 *   @request : request from framework to process
2327 *
2328 * RETURN     :
2329 *
2330 *==========================================================================*/
2331int QCamera3HardwareInterface::validateCaptureRequest(
2332                    camera3_capture_request_t *request)
2333{
2334    ssize_t idx = 0;
2335    const camera3_stream_buffer_t *b;
2336    CameraMetadata meta;
2337
2338    /* Sanity check the request */
2339    if (request == NULL) {
2340        LOGE("NULL capture request");
2341        return BAD_VALUE;
2342    }
2343
2344    if ((request->settings == NULL) && (mState == CONFIGURED)) {
2345        /*settings cannot be null for the first request*/
2346        return BAD_VALUE;
2347    }
2348
2349    uint32_t frameNumber = request->frame_number;
2350    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2351        LOGE("Request %d: No output buffers provided!",
2352                __FUNCTION__, frameNumber);
2353        return BAD_VALUE;
2354    }
2355    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2356        LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2357                 request->num_output_buffers, MAX_NUM_STREAMS);
2358        return BAD_VALUE;
2359    }
2360    if (request->input_buffer != NULL) {
2361        b = request->input_buffer;
2362        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2363            LOGE("Request %d: Buffer %ld: Status not OK!",
2364                     frameNumber, (long)idx);
2365            return BAD_VALUE;
2366        }
2367        if (b->release_fence != -1) {
2368            LOGE("Request %d: Buffer %ld: Has a release fence!",
2369                     frameNumber, (long)idx);
2370            return BAD_VALUE;
2371        }
2372        if (b->buffer == NULL) {
2373            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2374                     frameNumber, (long)idx);
2375            return BAD_VALUE;
2376        }
2377    }
2378
2379    // Validate all buffers
2380    b = request->output_buffers;
2381    do {
2382        QCamera3ProcessingChannel *channel =
2383                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2384        if (channel == NULL) {
2385            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2386                     frameNumber, (long)idx);
2387            return BAD_VALUE;
2388        }
2389        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2390            LOGE("Request %d: Buffer %ld: Status not OK!",
2391                     frameNumber, (long)idx);
2392            return BAD_VALUE;
2393        }
2394        if (b->release_fence != -1) {
2395            LOGE("Request %d: Buffer %ld: Has a release fence!",
2396                     frameNumber, (long)idx);
2397            return BAD_VALUE;
2398        }
2399        if (b->buffer == NULL) {
2400            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2401                     frameNumber, (long)idx);
2402            return BAD_VALUE;
2403        }
2404        if (*(b->buffer) == NULL) {
2405            LOGE("Request %d: Buffer %ld: NULL private handle!",
2406                     frameNumber, (long)idx);
2407            return BAD_VALUE;
2408        }
2409        idx++;
2410        b = request->output_buffers + idx;
2411    } while (idx < (ssize_t)request->num_output_buffers);
2412
2413    return NO_ERROR;
2414}
2415
2416/*===========================================================================
2417 * FUNCTION   : deriveMinFrameDuration
2418 *
2419 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
2420 *              on currently configured streams.
2421 *
2422 * PARAMETERS : NONE
2423 *
2424 * RETURN     : NONE
2425 *
2426 *==========================================================================*/
2427void QCamera3HardwareInterface::deriveMinFrameDuration()
2428{
2429    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2430
2431    maxJpegDim = 0;
2432    maxProcessedDim = 0;
2433    maxRawDim = 0;
2434
2435    // Figure out maximum jpeg, processed, and raw dimensions
2436    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2437        it != mStreamInfo.end(); it++) {
2438
2439        // Input stream doesn't have valid stream_type
2440        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2441            continue;
2442
2443        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2444        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2445            if (dimension > maxJpegDim)
2446                maxJpegDim = dimension;
2447        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2448                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2449                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2450            if (dimension > maxRawDim)
2451                maxRawDim = dimension;
2452        } else {
2453            if (dimension > maxProcessedDim)
2454                maxProcessedDim = dimension;
2455        }
2456    }
2457
2458    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2459            MAX_SIZES_CNT);
2460
2461    //Assume all jpeg dimensions are in processed dimensions.
2462    if (maxJpegDim > maxProcessedDim)
2463        maxProcessedDim = maxJpegDim;
2464    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2465    if (maxProcessedDim > maxRawDim) {
2466        maxRawDim = INT32_MAX;
2467
2468        for (size_t i = 0; i < count; i++) {
2469            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2470                    gCamCapability[mCameraId]->raw_dim[i].height;
2471            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2472                maxRawDim = dimension;
2473        }
2474    }
2475
2476    //Find minimum durations for processed, jpeg, and raw
2477    for (size_t i = 0; i < count; i++) {
2478        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2479                gCamCapability[mCameraId]->raw_dim[i].height) {
2480            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2481            break;
2482        }
2483    }
2484    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2485    for (size_t i = 0; i < count; i++) {
2486        if (maxProcessedDim ==
2487                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2488                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2489            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2490            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2491            break;
2492        }
2493    }
2494}
2495
2496/*===========================================================================
2497 * FUNCTION   : getMinFrameDuration
2498 *
2499 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2500 *              and current request configuration.
2501 *
2502 * PARAMETERS : @request: requset sent by the frameworks
2503 *
2504 * RETURN     : min farme duration for a particular request
2505 *
2506 *==========================================================================*/
2507int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2508{
2509    bool hasJpegStream = false;
2510    bool hasRawStream = false;
2511    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2512        const camera3_stream_t *stream = request->output_buffers[i].stream;
2513        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2514            hasJpegStream = true;
2515        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2516                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2517                stream->format == HAL_PIXEL_FORMAT_RAW16)
2518            hasRawStream = true;
2519    }
2520
2521    if (!hasJpegStream)
2522        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2523    else
2524        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2525}
2526
2527/*===========================================================================
2528 * FUNCTION   : handleBuffersDuringFlushLock
2529 *
2530 * DESCRIPTION: Account for buffers returned from back-end during flush
2531 *              This function is executed while mMutex is held by the caller.
2532 *
2533 * PARAMETERS :
2534 *   @buffer: image buffer for the callback
2535 *
2536 * RETURN     :
2537 *==========================================================================*/
2538void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2539{
2540    bool buffer_found = false;
2541    for (List<PendingBuffersInRequest>::iterator req =
2542            mPendingBuffersMap.mPendingBuffersInRequest.begin();
2543            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2544        for (List<PendingBufferInfo>::iterator i =
2545                req->mPendingBufferList.begin();
2546                i != req->mPendingBufferList.end(); i++) {
2547            if (i->buffer == buffer->buffer) {
2548                mPendingBuffersMap.numPendingBufsAtFlush--;
2549                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2550                    buffer->buffer, req->frame_number,
2551                    mPendingBuffersMap.numPendingBufsAtFlush);
2552                buffer_found = true;
2553                break;
2554            }
2555        }
2556        if (buffer_found) {
2557            break;
2558        }
2559    }
2560    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2561        //signal the flush()
2562        LOGD("All buffers returned to HAL. Continue flush");
2563        pthread_cond_signal(&mBuffersCond);
2564    }
2565}
2566
2567
2568/*===========================================================================
2569 * FUNCTION   : handlePendingReprocResults
2570 *
2571 * DESCRIPTION: check and notify on any pending reprocess results
2572 *
2573 * PARAMETERS :
2574 *   @frame_number   : Pending request frame number
2575 *
2576 * RETURN     : int32_t type of status
2577 *              NO_ERROR  -- success
2578 *              none-zero failure code
2579 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a queued reprocess result matching this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the shutter notify that was deferred for this frame.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so we can pair its settings
            // and input buffer with the stored output buffer.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build the complete capture result: one output buffer
                    // (from the reprocess entry), the request's input buffer
                    // and settings, final partial result count.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully answered — remove it from the pending list.
                    // Break immediately: k is invalidated by the erase.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Remove the consumed reprocess entry and stop; j is invalidated
            // by the erase, so we must not continue the outer loop.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // Always reports success; a missing frame number is simply a no-op.
    return NO_ERROR;
}
2618
2619/*===========================================================================
2620 * FUNCTION   : handleBatchMetadata
2621 *
2622 * DESCRIPTION: Handles metadata buffer callback in batch mode
2623 *
2624 * PARAMETERS : @metadata_buf: metadata buffer
2625 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2626 *                 the meta buf in this method
2627 *
2628 * RETURN     :
2629 *
2630 *==========================================================================*/
2631void QCamera3HardwareInterface::handleBatchMetadata(
2632        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2633{
2634    ATRACE_CALL();
2635
2636    if (NULL == metadata_buf) {
2637        LOGE("metadata_buf is NULL");
2638        return;
2639    }
2640    /* In batch mode, the metdata will contain the frame number and timestamp of
2641     * the last frame in the batch. Eg: a batch containing buffers from request
2642     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2643     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2644     * multiple process_capture_results */
2645    metadata_buffer_t *metadata =
2646            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2647    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2648    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2649    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2650    uint32_t frame_number = 0, urgent_frame_number = 0;
2651    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2652    bool invalid_metadata = false;
2653    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2654    size_t loopCount = 1;
2655
2656    int32_t *p_frame_number_valid =
2657            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2658    uint32_t *p_frame_number =
2659            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2660    int64_t *p_capture_time =
2661            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2662    int32_t *p_urgent_frame_number_valid =
2663            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2664    uint32_t *p_urgent_frame_number =
2665            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2666
2667    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2668            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2669            (NULL == p_urgent_frame_number)) {
2670        LOGE("Invalid metadata");
2671        invalid_metadata = true;
2672    } else {
2673        frame_number_valid = *p_frame_number_valid;
2674        last_frame_number = *p_frame_number;
2675        last_frame_capture_time = *p_capture_time;
2676        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2677        last_urgent_frame_number = *p_urgent_frame_number;
2678    }
2679
2680    /* In batchmode, when no video buffers are requested, set_parms are sent
2681     * for every capture_request. The difference between consecutive urgent
2682     * frame numbers and frame numbers should be used to interpolate the
2683     * corresponding frame numbers and time stamps */
2684    pthread_mutex_lock(&mMutex);
2685    if (urgent_frame_number_valid) {
2686        first_urgent_frame_number =
2687                mPendingBatchMap.valueFor(last_urgent_frame_number);
2688        urgentFrameNumDiff = last_urgent_frame_number + 1 -
2689                first_urgent_frame_number;
2690
2691        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
2692                 urgent_frame_number_valid,
2693                first_urgent_frame_number, last_urgent_frame_number);
2694    }
2695
2696    if (frame_number_valid) {
2697        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2698        frameNumDiff = last_frame_number + 1 -
2699                first_frame_number;
2700        mPendingBatchMap.removeItem(last_frame_number);
2701
2702        LOGD("frm: valid: %d frm_num: %d - %d",
2703                 frame_number_valid,
2704                first_frame_number, last_frame_number);
2705
2706    }
2707    pthread_mutex_unlock(&mMutex);
2708
2709    if (urgent_frame_number_valid || frame_number_valid) {
2710        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2711        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2712            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
2713                     urgentFrameNumDiff, last_urgent_frame_number);
2714        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2715            LOGE("frameNumDiff: %d frameNum: %d",
2716                     frameNumDiff, last_frame_number);
2717    }
2718
2719    for (size_t i = 0; i < loopCount; i++) {
2720        /* handleMetadataWithLock is called even for invalid_metadata for
2721         * pipeline depth calculation */
2722        if (!invalid_metadata) {
2723            /* Infer frame number. Batch metadata contains frame number of the
2724             * last frame */
2725            if (urgent_frame_number_valid) {
2726                if (i < urgentFrameNumDiff) {
2727                    urgent_frame_number =
2728                            first_urgent_frame_number + i;
2729                    LOGD("inferred urgent frame_number: %d",
2730                             urgent_frame_number);
2731                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2732                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2733                } else {
2734                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2735                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2736                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2737                }
2738            }
2739
2740            /* Infer frame number. Batch metadata contains frame number of the
2741             * last frame */
2742            if (frame_number_valid) {
2743                if (i < frameNumDiff) {
2744                    frame_number = first_frame_number + i;
2745                    LOGD("inferred frame_number: %d", frame_number);
2746                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2747                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2748                } else {
2749                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2750                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2751                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2752                }
2753            }
2754
2755            if (last_frame_capture_time) {
2756                //Infer timestamp
2757                first_frame_capture_time = last_frame_capture_time -
2758                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2759                capture_time =
2760                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2761                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2762                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2763                LOGD("batch capture_time: %lld, capture_time: %lld",
2764                         last_frame_capture_time, capture_time);
2765            }
2766        }
2767        pthread_mutex_lock(&mMutex);
2768        handleMetadataWithLock(metadata_buf,
2769                false /* free_and_bufdone_meta_buf */);
2770        pthread_mutex_unlock(&mMutex);
2771    }
2772
2773    /* BufDone metadata buffer */
2774    if (free_and_bufdone_meta_buf) {
2775        mMetadataChannel->bufDone(metadata_buf);
2776        free(metadata_buf);
2777    }
2778}
2779
2780/*===========================================================================
2781 * FUNCTION   : handleMetadataWithLock
2782 *
2783 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2784 *
2785 * PARAMETERS : @metadata_buf: metadata buffer
2786 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2787 *                 the meta buf in this method
2788 *
2789 * RETURN     :
2790 *
2791 *==========================================================================*/
2792void QCamera3HardwareInterface::handleMetadataWithLock(
2793    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2794{
2795    ATRACE_CALL();
2796    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2797        //during flush do not send metadata from this thread
2798        LOGD("not sending metadata during flush or when mState is error");
2799        if (free_and_bufdone_meta_buf) {
2800            mMetadataChannel->bufDone(metadata_buf);
2801            free(metadata_buf);
2802        }
2803        return;
2804    }
2805
2806    //not in flush
2807    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2808    int32_t frame_number_valid, urgent_frame_number_valid;
2809    uint32_t frame_number, urgent_frame_number;
2810    int64_t capture_time;
2811    nsecs_t currentSysTime;
2812
2813    int32_t *p_frame_number_valid =
2814            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2815    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2816    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2817    int32_t *p_urgent_frame_number_valid =
2818            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2819    uint32_t *p_urgent_frame_number =
2820            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2821    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2822            metadata) {
2823        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2824                 *p_frame_number_valid, *p_frame_number);
2825    }
2826
2827    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2828            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2829        LOGE("Invalid metadata");
2830        if (free_and_bufdone_meta_buf) {
2831            mMetadataChannel->bufDone(metadata_buf);
2832            free(metadata_buf);
2833        }
2834        goto done_metadata;
2835    }
2836    frame_number_valid =        *p_frame_number_valid;
2837    frame_number =              *p_frame_number;
2838    capture_time =              *p_capture_time;
2839    urgent_frame_number_valid = *p_urgent_frame_number_valid;
2840    urgent_frame_number =       *p_urgent_frame_number;
2841    currentSysTime =            systemTime(CLOCK_MONOTONIC);
2842
2843    // Detect if buffers from any requests are overdue
2844    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2845        if ( (currentSysTime - req.timestamp) >
2846            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2847            for (auto &missed : req.mPendingBufferList) {
2848                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
2849                    "stream type = %d, stream format = %d",
2850                    frame_number, req.frame_number, missed.buffer,
2851                    missed.stream->stream_type, missed.stream->format);
2852            }
2853        }
2854    }
2855    //Partial result on process_capture_result for timestamp
2856    if (urgent_frame_number_valid) {
2857        LOGD("valid urgent frame_number = %u, capture_time = %lld",
2858           urgent_frame_number, capture_time);
2859
2860        //Recieved an urgent Frame Number, handle it
2861        //using partial results
2862        for (pendingRequestIterator i =
2863                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2864            LOGD("Iterator Frame = %d urgent frame = %d",
2865                 i->frame_number, urgent_frame_number);
2866
2867            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2868                (i->partial_result_cnt == 0)) {
2869                LOGE("Error: HAL missed urgent metadata for frame number %d",
2870                         i->frame_number);
2871            }
2872
2873            if (i->frame_number == urgent_frame_number &&
2874                     i->bUrgentReceived == 0) {
2875
2876                camera3_capture_result_t result;
2877                memset(&result, 0, sizeof(camera3_capture_result_t));
2878
2879                i->partial_result_cnt++;
2880                i->bUrgentReceived = 1;
2881                // Extract 3A metadata
2882                result.result =
2883                    translateCbUrgentMetadataToResultMetadata(metadata);
2884                // Populate metadata result
2885                result.frame_number = urgent_frame_number;
2886                result.num_output_buffers = 0;
2887                result.output_buffers = NULL;
2888                result.partial_result = i->partial_result_cnt;
2889
2890                mCallbackOps->process_capture_result(mCallbackOps, &result);
2891                LOGD("urgent frame_number = %u, capture_time = %lld",
2892                      result.frame_number, capture_time);
2893                free_camera_metadata((camera_metadata_t *)result.result);
2894                break;
2895            }
2896        }
2897    }
2898
2899    if (!frame_number_valid) {
2900        LOGD("Not a valid normal frame number, used as SOF only");
2901        if (free_and_bufdone_meta_buf) {
2902            mMetadataChannel->bufDone(metadata_buf);
2903            free(metadata_buf);
2904        }
2905        goto done_metadata;
2906    }
2907    LOGH("valid frame_number = %u, capture_time = %lld",
2908            frame_number, capture_time);
2909
2910    for (pendingRequestIterator i = mPendingRequestsList.begin();
2911            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2912        // Flush out all entries with less or equal frame numbers.
2913
2914        camera3_capture_result_t result;
2915        memset(&result, 0, sizeof(camera3_capture_result_t));
2916
2917        LOGD("frame_number in the list is %u", i->frame_number);
2918        i->partial_result_cnt++;
2919        result.partial_result = i->partial_result_cnt;
2920
2921        // Check whether any stream buffer corresponding to this is dropped or not
2922        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2923        // The API does not expect a blob buffer to be dropped
2924        if (p_cam_frame_drop) {
2925            /* Clear notify_msg structure */
2926            camera3_notify_msg_t notify_msg;
2927            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2928            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2929                    j != i->buffers.end(); j++) {
2930                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2931                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2932                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
2933                    if (streamID == p_cam_frame_drop->streamID[k]) {
2934                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2935                        LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
2936                                __func__, i->frame_number, streamID, j->stream->format);
2937                        notify_msg.type = CAMERA3_MSG_ERROR;
2938                        notify_msg.message.error.frame_number = i->frame_number;
2939                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
2940                        notify_msg.message.error.error_stream = j->stream;
2941                        mCallbackOps->notify(mCallbackOps, &notify_msg);
2942                        LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
2943                                __func__, i->frame_number, streamID, j->stream->format);
2944                        PendingFrameDropInfo PendingFrameDrop;
2945                        PendingFrameDrop.frame_number=i->frame_number;
2946                        PendingFrameDrop.stream_ID = streamID;
2947                        // Add the Frame drop info to mPendingFrameDropList
2948                        mPendingFrameDropList.push_back(PendingFrameDrop);
2949                   }
2950               }
2951            }
2952        }
2953
2954        // Send empty metadata with already filled buffers for dropped metadata
2955        // and send valid metadata with already filled buffers for current metadata
2956        /* we could hit this case when we either
2957         * 1. have a pending reprocess request or
2958         * 2. miss a metadata buffer callback */
2959        if (i->frame_number < frame_number) {
2960            if (i->input_buffer) {
2961                /* this will be handled in handleInputBufferWithLock */
2962                i++;
2963                continue;
2964            } else {
2965                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
2966                if (free_and_bufdone_meta_buf) {
2967                    mMetadataChannel->bufDone(metadata_buf);
2968                    free(metadata_buf);
2969                }
2970                mState = ERROR;
2971                goto done_metadata;
2972            }
2973        } else {
2974            mPendingLiveRequest--;
2975            /* Clear notify_msg structure */
2976            camera3_notify_msg_t notify_msg;
2977            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2978
2979            // Send shutter notify to frameworks
2980            notify_msg.type = CAMERA3_MSG_SHUTTER;
2981            notify_msg.message.shutter.frame_number = i->frame_number;
2982            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2983            mCallbackOps->notify(mCallbackOps, &notify_msg);
2984
2985            i->timestamp = capture_time;
2986
2987            // Find channel requiring metadata, meaning internal offline postprocess
2988            // is needed.
2989            //TODO: for now, we don't support two streams requiring metadata at the same time.
2990            // (because we are not making copies, and metadata buffer is not reference counted.
2991            bool internalPproc = false;
2992            for (pendingBufferIterator iter = i->buffers.begin();
2993                    iter != i->buffers.end(); iter++) {
2994                if (iter->need_metadata) {
2995                    internalPproc = true;
2996                    QCamera3ProcessingChannel *channel =
2997                            (QCamera3ProcessingChannel *)iter->stream->priv;
2998                    channel->queueReprocMetadata(metadata_buf);
2999                    break;
3000                }
3001            }
3002
3003            result.result = translateFromHalMetadata(metadata,
3004                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
3005                    i->capture_intent, i->hybrid_ae_enable,
3006                     /* DevCamDebug metadata translateFromHalMetadata function call*/
3007                    i->DevCamDebug_meta_enable,
3008                    /* DevCamDebug metadata end */
3009                    internalPproc, i->fwkCacMode);
3010
3011            saveExifParams(metadata);
3012
3013            if (i->blob_request) {
3014                {
3015                    //Dump tuning metadata if enabled and available
3016                    char prop[PROPERTY_VALUE_MAX];
3017                    memset(prop, 0, sizeof(prop));
3018                    property_get("persist.camera.dumpmetadata", prop, "0");
3019                    int32_t enabled = atoi(prop);
3020                    if (enabled && metadata->is_tuning_params_valid) {
3021                        dumpMetadataToFile(metadata->tuning_params,
3022                               mMetaFrameCount,
3023                               enabled,
3024                               "Snapshot",
3025                               frame_number);
3026                    }
3027                }
3028            }
3029
3030            if (!internalPproc) {
3031                LOGD("couldn't find need_metadata for this metadata");
3032                // Return metadata buffer
3033                if (free_and_bufdone_meta_buf) {
3034                    mMetadataChannel->bufDone(metadata_buf);
3035                    free(metadata_buf);
3036                }
3037            }
3038        }
3039        if (!result.result) {
3040            LOGE("metadata is NULL");
3041        }
3042        result.frame_number = i->frame_number;
3043        result.input_buffer = i->input_buffer;
3044        result.num_output_buffers = 0;
3045        result.output_buffers = NULL;
3046        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3047                    j != i->buffers.end(); j++) {
3048            if (j->buffer) {
3049                result.num_output_buffers++;
3050            }
3051        }
3052
3053        updateFpsInPreviewBuffer(metadata, i->frame_number);
3054
3055        if (result.num_output_buffers > 0) {
3056            camera3_stream_buffer_t *result_buffers =
3057                new camera3_stream_buffer_t[result.num_output_buffers];
3058            if (result_buffers != NULL) {
3059                size_t result_buffers_idx = 0;
3060                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3061                        j != i->buffers.end(); j++) {
3062                    if (j->buffer) {
3063                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3064                                m != mPendingFrameDropList.end(); m++) {
3065                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3066                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3067                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3068                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3069                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3070                                        frame_number, streamID);
3071                                m = mPendingFrameDropList.erase(m);
3072                                break;
3073                            }
3074                        }
3075                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
3076                        result_buffers[result_buffers_idx++] = *(j->buffer);
3077                        free(j->buffer);
3078                        j->buffer = NULL;
3079                    }
3080                }
3081                result.output_buffers = result_buffers;
3082                mCallbackOps->process_capture_result(mCallbackOps, &result);
3083                LOGD("meta frame_number = %u, capture_time = %lld",
3084                        result.frame_number, i->timestamp);
3085                free_camera_metadata((camera_metadata_t *)result.result);
3086                delete[] result_buffers;
3087            }else {
3088                LOGE("Fatal error: out of memory");
3089            }
3090        } else {
3091            mCallbackOps->process_capture_result(mCallbackOps, &result);
3092            LOGD("meta frame_number = %u, capture_time = %lld",
3093                    result.frame_number, i->timestamp);
3094            free_camera_metadata((camera_metadata_t *)result.result);
3095        }
3096
3097        i = erasePendingRequest(i);
3098
3099        if (!mPendingReprocessResultList.empty()) {
3100            handlePendingReprocResults(frame_number + 1);
3101        }
3102    }
3103
3104done_metadata:
3105    for (pendingRequestIterator i = mPendingRequestsList.begin();
3106            i != mPendingRequestsList.end() ;i++) {
3107        i->pipeline_depth++;
3108    }
3109    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3110    unblockRequestIfNecessary();
3111}
3112
3113/*===========================================================================
3114 * FUNCTION   : hdrPlusPerfLock
3115 *
3116 * DESCRIPTION: perf lock for HDR+ using custom intent
3117 *
3118 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3119 *
3120 * RETURN     : None
3121 *
3122 *==========================================================================*/
3123void QCamera3HardwareInterface::hdrPlusPerfLock(
3124        mm_camera_super_buf_t *metadata_buf)
3125{
3126    if (NULL == metadata_buf) {
3127        LOGE("metadata_buf is NULL");
3128        return;
3129    }
3130    metadata_buffer_t *metadata =
3131            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3132    int32_t *p_frame_number_valid =
3133            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3134    uint32_t *p_frame_number =
3135            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3136
3137    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3138        LOGE("%s: Invalid metadata", __func__);
3139        return;
3140    }
3141
3142    //acquire perf lock for 5 sec after the last HDR frame is captured
3143    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3144        if ((p_frame_number != NULL) &&
3145                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3146            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3147        }
3148    }
3149
3150    //release lock after perf lock timer is expired. If lock is already released,
3151    //isTimerReset returns false
3152    if (m_perfLock.isTimerReset()) {
3153        mLastCustIntentFrmNum = -1;
3154        m_perfLock.lock_rel_timed();
3155    }
3156}
3157
3158/*===========================================================================
3159 * FUNCTION   : handleInputBufferWithLock
3160 *
3161 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3162 *
3163 * PARAMETERS : @frame_number: frame number of the input buffer
3164 *
3165 * RETURN     :
3166 *
3167 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Find the pending request whose frame number matches this input buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            // Shutter has not been sent for this reprocess request yet.
            // Prefer the sensor timestamp carried in the request settings;
            // fall back to the current monotonic time when it is absent.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on the input buffer's release fence (if any) before returning
        // the buffer to the framework; the fence fd is closed here either way.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Reprocess result: echo the request settings back as the result
        // metadata and report the final partial result count in one shot.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Remove the completed request from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3225
3226/*===========================================================================
3227 * FUNCTION   : handleBufferWithLock
3228 *
3229 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3230 *
3231 * PARAMETERS : @buffer: image buffer for the callback
3232 *              @frame_number: frame number of the image buffer
3233 *
3234 * RETURN     :
3235 *
3236 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        // While a performance flush is in progress, buffers are routed
        // through the dedicated flush path instead of the normal one.
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Case 1: metadata for this frame was already dispatched — send a
        // buffer-only result (partial_result = 0, result = NULL).
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped earlier, mark
        // the buffer STATUS_ERROR and clear its entry from the drop list.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Case 2: reprocess request. Build the shutter notification with
            // the sensor timestamp from the settings when available,
            // otherwise the current monotonic time.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on the input buffer's release fence (if any); the fence fd
            // is closed here either way.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            mPendingBuffersMap.removeBuf(buffer->buffer);

            // The reprocess result may only be delivered once every earlier
            // frame has been dealt with; otherwise cache it for later.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                // Deliver shutter + full result (settings echoed back,
                // PARTIAL_RESULT_COUNT) immediately and drop the request.
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("Notify reprocess now %d!", frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (flushed out by handlePendingReprocResults once earlier
                // frames complete; buffer is copied by value here).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                LOGD("Cache reprocess result %d!", frame_number);
            }
        } else {
            // Case 3: metadata not yet received — book-keep a heap copy of
            // the buffer on the matching stream entry of the pending request
            // so the metadata path can send it later.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }
}
3372
3373/*===========================================================================
3374 * FUNCTION   : unblockRequestIfNecessary
3375 *
3376 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3377 *              that mMutex is held when this function is called.
3378 *
3379 * PARAMETERS :
3380 *
3381 * RETURN     :
3382 *
3383 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Caller already holds mMutex (see header comment); the waiter re-checks
   // its condition on wakeup, so a spurious signal is harmless.
   pthread_cond_signal(&mRequestCond);
}
3389
3390
3391/*===========================================================================
3392 * FUNCTION   : processCaptureRequest
3393 *
3394 * DESCRIPTION: process a capture request from camera service
3395 *
3396 * PARAMETERS :
3397 *   @request : request from framework to process
3398 *
3399 * RETURN     :
3400 *
3401 *==========================================================================*/
3402int QCamera3HardwareInterface::processCaptureRequest(
3403                    camera3_capture_request_t *request)
3404{
3405    ATRACE_CALL();
3406    int rc = NO_ERROR;
3407    int32_t request_id;
3408    CameraMetadata meta;
3409    bool isVidBufRequested = false;
3410    camera3_stream_buffer_t *pInputBuffer = NULL;
3411
3412    pthread_mutex_lock(&mMutex);
3413
3414    // Validate current state
3415    switch (mState) {
3416        case CONFIGURED:
3417        case STARTED:
3418            /* valid state */
3419            break;
3420
3421        case ERROR:
3422            pthread_mutex_unlock(&mMutex);
3423            handleCameraDeviceError();
3424            return -ENODEV;
3425
3426        default:
3427            LOGE("Invalid state %d", mState);
3428            pthread_mutex_unlock(&mMutex);
3429            return -ENODEV;
3430    }
3431
3432    rc = validateCaptureRequest(request);
3433    if (rc != NO_ERROR) {
3434        LOGE("incoming request is not valid");
3435        pthread_mutex_unlock(&mMutex);
3436        return rc;
3437    }
3438
3439    meta = request->settings;
3440
3441    // For first capture request, send capture intent, and
3442    // stream on all streams
3443    if (mState == CONFIGURED) {
3444        // send an unconfigure to the backend so that the isp
3445        // resources are deallocated
3446        if (!mFirstConfiguration) {
3447            cam_stream_size_info_t stream_config_info;
3448            int32_t hal_version = CAM_HAL_V3;
3449            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3450            stream_config_info.buffer_info.min_buffers =
3451                    MIN_INFLIGHT_REQUESTS;
3452            stream_config_info.buffer_info.max_buffers =
3453                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3454            clear_metadata_buffer(mParameters);
3455            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3456                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3457            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3458                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3459            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3460                    mParameters);
3461            if (rc < 0) {
3462                LOGE("set_parms for unconfigure failed");
3463                pthread_mutex_unlock(&mMutex);
3464                return rc;
3465            }
3466        }
3467        m_perfLock.lock_acq();
3468        /* get eis information for stream configuration */
3469        cam_is_type_t is_type;
3470        char is_type_value[PROPERTY_VALUE_MAX];
3471        property_get("persist.camera.is_type", is_type_value, "0");
3472        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3473
3474        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3475            int32_t hal_version = CAM_HAL_V3;
3476            uint8_t captureIntent =
3477                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3478            mCaptureIntent = captureIntent;
3479            clear_metadata_buffer(mParameters);
3480            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3481            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3482        }
3483
3484        //If EIS is enabled, turn it on for video
3485        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3486        int32_t vsMode;
3487        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3488        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3489            rc = BAD_VALUE;
3490        }
3491
3492        //IS type will be 0 unless EIS is supported. If EIS is supported
3493        //it could either be 1 or 4 depending on the stream and video size
3494        if (setEis) {
3495            if (!m_bEisSupportedSize) {
3496                is_type = IS_TYPE_DIS;
3497            } else {
3498                is_type = IS_TYPE_EIS_2_0;
3499            }
3500            mStreamConfigInfo.is_type = is_type;
3501        } else {
3502            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3503        }
3504
3505        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3506                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3507        int32_t tintless_value = 1;
3508        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3509                CAM_INTF_PARM_TINTLESS, tintless_value);
3510        //Disable CDS for HFR mode or if DIS/EIS is on.
3511        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3512        //after every configure_stream
3513        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3514                (m_bIsVideo)) {
3515            int32_t cds = CAM_CDS_MODE_OFF;
3516            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3517                    CAM_INTF_PARM_CDS_MODE, cds))
3518                LOGE("Failed to disable CDS for HFR mode");
3519
3520        }
3521        setMobicat();
3522
3523        /* Set fps and hfr mode while sending meta stream info so that sensor
3524         * can configure appropriate streaming mode */
3525        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3526        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3527        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3528        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3529            rc = setHalFpsRange(meta, mParameters);
3530            if (rc == NO_ERROR) {
3531                int32_t max_fps =
3532                    (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3533                if (max_fps == 60) {
3534                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3535                }
3536                /* For HFR, more buffers are dequeued upfront to improve the performance */
3537                if (mBatchSize) {
3538                    mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3539                    mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3540                }
3541            }
3542            else {
3543                LOGE("setHalFpsRange failed");
3544            }
3545        }
3546        if (meta.exists(ANDROID_CONTROL_MODE)) {
3547            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3548            rc = extractSceneMode(meta, metaMode, mParameters);
3549            if (rc != NO_ERROR) {
3550                LOGE("extractSceneMode failed");
3551            }
3552        }
3553
3554        //TODO: validate the arguments, HSV scenemode should have only the
3555        //advertised fps ranges
3556
3557        /*set the capture intent, hal version, tintless, stream info,
         *and DIS enable parameters to the backend*/
3559        LOGD("set_parms META_STREAM_INFO " );
3560        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3561            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
3562                    "Format:%d",
3563                    mStreamConfigInfo.type[i],
3564                    mStreamConfigInfo.stream_sizes[i].width,
3565                    mStreamConfigInfo.stream_sizes[i].height,
3566                    mStreamConfigInfo.postprocess_mask[i],
3567                    mStreamConfigInfo.format[i]);
3568        }
3569        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3570                    mParameters);
3571        if (rc < 0) {
3572            LOGE("set_parms failed for hal version, stream info");
3573        }
3574
3575        cam_dimension_t sensor_dim;
3576        memset(&sensor_dim, 0, sizeof(sensor_dim));
3577        rc = getSensorOutputSize(sensor_dim);
3578        if (rc != NO_ERROR) {
3579            LOGE("Failed to get sensor output size");
3580            pthread_mutex_unlock(&mMutex);
3581            goto error_exit;
3582        }
3583
3584        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3585                gCamCapability[mCameraId]->active_array_size.height,
3586                sensor_dim.width, sensor_dim.height);
3587
3588        /* Set batchmode before initializing channel. Since registerBuffer
3589         * internally initializes some of the channels, better set batchmode
3590         * even before first register buffer */
3591        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3592            it != mStreamInfo.end(); it++) {
3593            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3594            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3595                    && mBatchSize) {
3596                rc = channel->setBatchSize(mBatchSize);
3597                //Disable per frame map unmap for HFR/batchmode case
3598                rc |= channel->setPerFrameMapUnmap(false);
3599                if (NO_ERROR != rc) {
3600                    LOGE("Channel init failed %d", rc);
3601                    pthread_mutex_unlock(&mMutex);
3602                    goto error_exit;
3603                }
3604            }
3605        }
3606
3607        //First initialize all streams
3608        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3609            it != mStreamInfo.end(); it++) {
3610            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3611            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3612               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3613               setEis)
3614                rc = channel->initialize(is_type);
3615            else {
3616                rc = channel->initialize(IS_TYPE_NONE);
3617            }
3618            if (NO_ERROR != rc) {
3619                LOGE("Channel initialization failed %d", rc);
3620                pthread_mutex_unlock(&mMutex);
3621                goto error_exit;
3622            }
3623        }
3624
3625        if (mRawDumpChannel) {
3626            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3627            if (rc != NO_ERROR) {
3628                LOGE("Error: Raw Dump Channel init failed");
3629                pthread_mutex_unlock(&mMutex);
3630                goto error_exit;
3631            }
3632        }
3633        if (mSupportChannel) {
3634            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3635            if (rc < 0) {
3636                LOGE("Support channel initialization failed");
3637                pthread_mutex_unlock(&mMutex);
3638                goto error_exit;
3639            }
3640        }
3641        if (mAnalysisChannel) {
3642            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3643            if (rc < 0) {
3644                LOGE("Analysis channel initialization failed");
3645                pthread_mutex_unlock(&mMutex);
3646                goto error_exit;
3647            }
3648        }
3649        if (mDummyBatchChannel) {
3650            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3651            if (rc < 0) {
3652                LOGE("mDummyBatchChannel setBatchSize failed");
3653                pthread_mutex_unlock(&mMutex);
3654                goto error_exit;
3655            }
3656            rc = mDummyBatchChannel->initialize(is_type);
3657            if (rc < 0) {
3658                LOGE("mDummyBatchChannel initialization failed");
3659                pthread_mutex_unlock(&mMutex);
3660                goto error_exit;
3661            }
3662        }
3663
3664        // Set bundle info
3665        rc = setBundleInfo();
3666        if (rc < 0) {
3667            LOGE("setBundleInfo failed %d", rc);
3668            pthread_mutex_unlock(&mMutex);
3669            goto error_exit;
3670        }
3671
3672        //update settings from app here
3673        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3674            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3675            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3676        }
3677        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3678            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3679            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3680        }
3681        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3682            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3683            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3684
3685            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3686                (mLinkedCameraId != mCameraId) ) {
3687                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3688                    mLinkedCameraId, mCameraId);
3689                goto error_exit;
3690            }
3691        }
3692
3693        // add bundle related cameras
3694        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3695        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3696            if (mIsDeviceLinked)
3697                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3698            else
3699                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3700
3701            pthread_mutex_lock(&gCamLock);
3702
3703            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3704                LOGE("Dualcam: Invalid Session Id ");
3705                pthread_mutex_unlock(&gCamLock);
3706                goto error_exit;
3707            }
3708
3709            if (mIsMainCamera == 1) {
3710                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3711                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
3712                // related session id should be session id of linked session
3713                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3714            } else {
3715                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3716                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
3717                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3718            }
3719            pthread_mutex_unlock(&gCamLock);
3720
3721            rc = mCameraHandle->ops->sync_related_sensors(
3722                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
3723            if (rc < 0) {
3724                LOGE("Dualcam: link failed");
3725                goto error_exit;
3726            }
3727        }
3728
3729        //Then start them.
3730        LOGH("Start META Channel");
3731        rc = mMetadataChannel->start();
3732        if (rc < 0) {
3733            LOGE("META channel start failed");
3734            pthread_mutex_unlock(&mMutex);
3735            goto error_exit;
3736        }
3737
3738        if (mAnalysisChannel) {
3739            rc = mAnalysisChannel->start();
3740            if (rc < 0) {
3741                LOGE("Analysis channel start failed");
3742                mMetadataChannel->stop();
3743                pthread_mutex_unlock(&mMutex);
3744                goto error_exit;
3745            }
3746        }
3747
3748        if (mSupportChannel) {
3749            rc = mSupportChannel->start();
3750            if (rc < 0) {
3751                LOGE("Support channel start failed");
3752                mMetadataChannel->stop();
3753                /* Although support and analysis are mutually exclusive today
3754                   adding it in anycase for future proofing */
3755                if (mAnalysisChannel) {
3756                    mAnalysisChannel->stop();
3757                }
3758                pthread_mutex_unlock(&mMutex);
3759                goto error_exit;
3760            }
3761        }
3762        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3763            it != mStreamInfo.end(); it++) {
3764            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3765            LOGH("Start Processing Channel mask=%d",
3766                     channel->getStreamTypeMask());
3767            rc = channel->start();
3768            if (rc < 0) {
3769                LOGE("channel start failed");
3770                pthread_mutex_unlock(&mMutex);
3771                goto error_exit;
3772            }
3773        }
3774
3775        if (mRawDumpChannel) {
3776            LOGD("Starting raw dump stream");
3777            rc = mRawDumpChannel->start();
3778            if (rc != NO_ERROR) {
3779                LOGE("Error Starting Raw Dump Channel");
3780                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3781                      it != mStreamInfo.end(); it++) {
3782                    QCamera3Channel *channel =
3783                        (QCamera3Channel *)(*it)->stream->priv;
3784                    LOGH("Stopping Processing Channel mask=%d",
3785                        channel->getStreamTypeMask());
3786                    channel->stop();
3787                }
3788                if (mSupportChannel)
3789                    mSupportChannel->stop();
3790                if (mAnalysisChannel) {
3791                    mAnalysisChannel->stop();
3792                }
3793                mMetadataChannel->stop();
3794                pthread_mutex_unlock(&mMutex);
3795                goto error_exit;
3796            }
3797        }
3798
3799        if (mChannelHandle) {
3800
3801            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3802                    mChannelHandle);
3803            if (rc != NO_ERROR) {
3804                LOGE("start_channel failed %d", rc);
3805                pthread_mutex_unlock(&mMutex);
3806                goto error_exit;
3807            }
3808        }
3809
3810        goto no_error;
3811error_exit:
3812        m_perfLock.lock_rel();
3813        return rc;
3814no_error:
3815        m_perfLock.lock_rel();
3816
3817        mWokenUpByDaemon = false;
3818        mPendingLiveRequest = 0;
3819        mFirstConfiguration = false;
3820        enablePowerHint();
3821    }
3822
3823    uint32_t frameNumber = request->frame_number;
3824    cam_stream_ID_t streamID;
3825
3826    if (mFlushPerf) {
3827        //we cannot accept any requests during flush
3828        LOGE("process_capture_request cannot proceed during flush");
3829        pthread_mutex_unlock(&mMutex);
3830        return NO_ERROR; //should return an error
3831    }
3832
3833    if (meta.exists(ANDROID_REQUEST_ID)) {
3834        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3835        mCurrentRequestId = request_id;
3836        LOGD("Received request with id: %d", request_id);
3837    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
3838        LOGE("Unable to find request id field, \
3839                & no previous id available");
3840        pthread_mutex_unlock(&mMutex);
3841        return NAME_NOT_FOUND;
3842    } else {
3843        LOGD("Re-using old request id");
3844        request_id = mCurrentRequestId;
3845    }
3846
3847    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
3848                                    request->num_output_buffers,
3849                                    request->input_buffer,
3850                                    frameNumber);
3851    // Acquire all request buffers first
3852    streamID.num_streams = 0;
3853    int blob_request = 0;
3854    uint32_t snapshotStreamId = 0;
3855    for (size_t i = 0; i < request->num_output_buffers; i++) {
3856        const camera3_stream_buffer_t& output = request->output_buffers[i];
3857        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3858
3859        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3860            //Call function to store local copy of jpeg data for encode params.
3861            blob_request = 1;
3862            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3863        }
3864
3865        if (output.acquire_fence != -1) {
3866           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3867           close(output.acquire_fence);
3868           if (rc != OK) {
3869              LOGE("sync wait failed %d", rc);
3870              pthread_mutex_unlock(&mMutex);
3871              return rc;
3872           }
3873        }
3874
3875        streamID.streamID[streamID.num_streams] =
3876            channel->getStreamID(channel->getStreamTypeMask());
3877        streamID.num_streams++;
3878
3879        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3880            isVidBufRequested = true;
3881        }
3882    }
3883
3884    if (blob_request) {
3885        KPI_ATRACE_INT("SNAPSHOT", 1);
3886    }
3887    if (blob_request && mRawDumpChannel) {
3888        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
3889        streamID.streamID[streamID.num_streams] =
3890            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3891        streamID.num_streams++;
3892    }
3893
3894    if(request->input_buffer == NULL) {
3895        /* Parse the settings:
3896         * - For every request in NORMAL MODE
3897         * - For every request in HFR mode during preview only case
3898         * - For first request of every batch in HFR mode during video
3899         * recording. In batchmode the same settings except frame number is
3900         * repeated in each request of the batch.
3901         */
3902        if (!mBatchSize ||
3903           (mBatchSize && !isVidBufRequested) ||
3904           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3905            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3906            if (rc < 0) {
3907                LOGE("fail to set frame parameters");
3908                pthread_mutex_unlock(&mMutex);
3909                return rc;
3910            }
3911        }
3912        /* For batchMode HFR, setFrameParameters is not called for every
3913         * request. But only frame number of the latest request is parsed.
3914         * Keep track of first and last frame numbers in a batch so that
3915         * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
3917        if (mBatchSize) {
3918            if (!mToBeQueuedVidBufs) {
3919                //start of the batch
3920                mFirstFrameNumberInBatch = request->frame_number;
3921            }
3922            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3923                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3924                LOGE("Failed to set the frame number in the parameters");
3925                return BAD_VALUE;
3926            }
3927        }
3928        if (mNeedSensorRestart) {
3929            /* Unlock the mutex as restartSensor waits on the channels to be
3930             * stopped, which in turn calls stream callback functions -
3931             * handleBufferWithLock and handleMetadataWithLock */
3932            pthread_mutex_unlock(&mMutex);
3933            rc = dynamicUpdateMetaStreamInfo();
3934            if (rc != NO_ERROR) {
3935                LOGE("Restarting the sensor failed");
3936                return BAD_VALUE;
3937            }
3938            mNeedSensorRestart = false;
3939            pthread_mutex_lock(&mMutex);
3940        }
3941    } else {
3942
3943        if (request->input_buffer->acquire_fence != -1) {
3944           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3945           close(request->input_buffer->acquire_fence);
3946           if (rc != OK) {
3947              LOGE("input buffer sync wait failed %d", rc);
3948              pthread_mutex_unlock(&mMutex);
3949              return rc;
3950           }
3951        }
3952    }
3953
3954    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3955        mLastCustIntentFrmNum = frameNumber;
3956    }
3957    /* Update pending request list and pending buffers map */
3958    PendingRequestInfo pendingRequest;
3959    pendingRequestIterator latestRequest;
3960    pendingRequest.frame_number = frameNumber;
3961    pendingRequest.num_buffers = request->num_output_buffers;
3962    pendingRequest.request_id = request_id;
3963    pendingRequest.blob_request = blob_request;
3964    pendingRequest.timestamp = 0;
3965    pendingRequest.bUrgentReceived = 0;
3966    if (request->input_buffer) {
3967        pendingRequest.input_buffer =
3968                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3969        *(pendingRequest.input_buffer) = *(request->input_buffer);
3970        pInputBuffer = pendingRequest.input_buffer;
3971    } else {
3972       pendingRequest.input_buffer = NULL;
3973       pInputBuffer = NULL;
3974    }
3975
3976    pendingRequest.pipeline_depth = 0;
3977    pendingRequest.partial_result_cnt = 0;
3978    extractJpegMetadata(mCurJpegMeta, request);
3979    pendingRequest.jpegMetadata = mCurJpegMeta;
3980    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3981    pendingRequest.shutter_notified = false;
3982
3983    //extract capture intent
3984    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3985        mCaptureIntent =
3986                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3987    }
3988    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3989        mHybridAeEnable =
3990                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3991    }
3992    pendingRequest.capture_intent = mCaptureIntent;
3993    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3994    /* DevCamDebug metadata processCaptureRequest */
3995    if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
3996        mDevCamDebugMetaEnable =
3997                meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
3998    }
3999    /* DevCamDebug metadata end */
4000
4001    //extract CAC info
4002    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4003        mCacMode =
4004                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4005    }
4006    pendingRequest.fwkCacMode = mCacMode;
4007
4008    PendingBuffersInRequest bufsForCurRequest;
4009    bufsForCurRequest.frame_number = frameNumber;
4010    // Mark current timestamp for the new request
4011    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4012
4013    for (size_t i = 0; i < request->num_output_buffers; i++) {
4014        RequestedBufferInfo requestedBuf;
4015        memset(&requestedBuf, 0, sizeof(requestedBuf));
4016        requestedBuf.stream = request->output_buffers[i].stream;
4017        requestedBuf.buffer = NULL;
4018        pendingRequest.buffers.push_back(requestedBuf);
4019
4020        // Add to buffer handle the pending buffers list
4021        PendingBufferInfo bufferInfo;
4022        bufferInfo.buffer = request->output_buffers[i].buffer;
4023        bufferInfo.stream = request->output_buffers[i].stream;
4024        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4025        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4026        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4027            frameNumber, bufferInfo.buffer,
4028            channel->getStreamTypeMask(), bufferInfo.stream->format);
4029    }
4030    // Add this request packet into mPendingBuffersMap
4031    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4032    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4033        mPendingBuffersMap.get_num_overall_buffers());
4034
4035    latestRequest = mPendingRequestsList.insert(
4036            mPendingRequestsList.end(), pendingRequest);
4037    if(mFlush) {
4038        LOGI("mFlush is true");
4039        pthread_mutex_unlock(&mMutex);
4040        return NO_ERROR;
4041    }
4042
4043    // Notify metadata channel we receive a request
4044    mMetadataChannel->request(NULL, frameNumber);
4045
4046    if(request->input_buffer != NULL){
4047        LOGD("Input request, frame_number %d", frameNumber);
4048        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4049        if (NO_ERROR != rc) {
4050            LOGE("fail to set reproc parameters");
4051            pthread_mutex_unlock(&mMutex);
4052            return rc;
4053        }
4054    }
4055
4056    // Call request on other streams
4057    uint32_t streams_need_metadata = 0;
4058    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4059    for (size_t i = 0; i < request->num_output_buffers; i++) {
4060        const camera3_stream_buffer_t& output = request->output_buffers[i];
4061        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4062
4063        if (channel == NULL) {
4064            LOGW("invalid channel pointer for stream");
4065            continue;
4066        }
4067
4068        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4069            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4070                      output.buffer, request->input_buffer, frameNumber);
4071            if(request->input_buffer != NULL){
4072                rc = channel->request(output.buffer, frameNumber,
4073                        pInputBuffer, &mReprocMeta);
4074                if (rc < 0) {
4075                    LOGE("Fail to request on picture channel");
4076                    pthread_mutex_unlock(&mMutex);
4077                    return rc;
4078                }
4079            } else {
4080                LOGD("snapshot request with buffer %p, frame_number %d",
4081                         output.buffer, frameNumber);
4082                if (!request->settings) {
4083                    rc = channel->request(output.buffer, frameNumber,
4084                            NULL, mPrevParameters);
4085                } else {
4086                    rc = channel->request(output.buffer, frameNumber,
4087                            NULL, mParameters);
4088                }
4089                if (rc < 0) {
4090                    LOGE("Fail to request on picture channel");
4091                    pthread_mutex_unlock(&mMutex);
4092                    return rc;
4093                }
4094                pendingBufferIter->need_metadata = true;
4095                streams_need_metadata++;
4096            }
4097        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4098            bool needMetadata = false;
4099
4100            if (m_perfLock.isPerfLockTimedAcquired()) {
4101                if (m_perfLock.isTimerReset())
4102                {
4103                    m_perfLock.lock_rel_timed();
4104                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4105                }
4106            } else {
4107                m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4108            }
4109
4110            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4111            rc = yuvChannel->request(output.buffer, frameNumber,
4112                    pInputBuffer,
4113                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
4114            if (rc < 0) {
4115                LOGE("Fail to request on YUV channel");
4116                pthread_mutex_unlock(&mMutex);
4117                return rc;
4118            }
4119            pendingBufferIter->need_metadata = needMetadata;
4120            if (needMetadata)
4121                streams_need_metadata += 1;
4122            LOGD("calling YUV channel request, need_metadata is %d",
4123                     needMetadata);
4124        } else {
4125            LOGD("request with buffer %p, frame_number %d",
4126                  output.buffer, frameNumber);
4127            /* Set perf lock for API-2 zsl */
4128            if (IS_USAGE_ZSL(output.stream->usage)) {
4129                if (m_perfLock.isPerfLockTimedAcquired()) {
4130                    if (m_perfLock.isTimerReset())
4131                    {
4132                        m_perfLock.lock_rel_timed();
4133                        m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4134                    }
4135                } else {
4136                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
4137                }
4138            }
4139
4140            rc = channel->request(output.buffer, frameNumber);
4141            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4142                    && mBatchSize) {
4143                mToBeQueuedVidBufs++;
4144                if (mToBeQueuedVidBufs == mBatchSize) {
4145                    channel->queueBatchBuf();
4146                }
4147            }
4148            if (rc < 0) {
4149                LOGE("request failed");
4150                pthread_mutex_unlock(&mMutex);
4151                return rc;
4152            }
4153        }
4154        pendingBufferIter++;
4155    }
4156
4157    //If 2 streams have need_metadata set to true, fail the request, unless
4158    //we copy/reference count the metadata buffer
4159    if (streams_need_metadata > 1) {
4160        LOGE("not supporting request in which two streams requires"
4161                " 2 HAL metadata for reprocessing");
4162        pthread_mutex_unlock(&mMutex);
4163        return -EINVAL;
4164    }
4165
4166    if(request->input_buffer == NULL) {
4167        /* Set the parameters to backend:
4168         * - For every request in NORMAL MODE
4169         * - For every request in HFR mode during preview only case
4170         * - Once every batch in HFR mode during video recording
4171         */
4172        if (!mBatchSize ||
4173           (mBatchSize && !isVidBufRequested) ||
4174           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4175            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4176                     mBatchSize, isVidBufRequested,
4177                    mToBeQueuedVidBufs);
4178            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4179                    mParameters);
4180            if (rc < 0) {
4181                LOGE("set_parms failed");
4182            }
4183            /* reset to zero coz, the batch is queued */
4184            mToBeQueuedVidBufs = 0;
4185            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4186        }
4187        mPendingLiveRequest++;
4188    }
4189
4190    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4191
4192    mState = STARTED;
4193    // Added a timed condition wait
4194    struct timespec ts;
4195    uint8_t isValidTimeout = 1;
4196    rc = clock_gettime(CLOCK_REALTIME, &ts);
4197    if (rc < 0) {
4198      isValidTimeout = 0;
4199      LOGE("Error reading the real time clock!!");
4200    }
4201    else {
4202      // Make timeout as 5 sec for request to be honored
4203      ts.tv_sec += 5;
4204    }
4205
4206    if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
4207        m_perfLock.lock_rel_timed();
4208
4209    //Block on conditional variable
4210    while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
4211            (mState != ERROR) && (mState != DEINIT)) {
4212        if (!isValidTimeout) {
4213            LOGD("Blocking on conditional wait");
4214            pthread_cond_wait(&mRequestCond, &mMutex);
4215        }
4216        else {
4217            LOGD("Blocking on timed conditional wait");
4218            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4219            if (rc == ETIMEDOUT) {
4220                rc = -ENODEV;
4221                LOGE("Unblocked on timeout!!!!");
4222                break;
4223            }
4224        }
4225        LOGD("Unblocked");
4226        if (mWokenUpByDaemon) {
4227            mWokenUpByDaemon = false;
4228            if (mPendingLiveRequest < mMaxInFlightRequests)
4229                break;
4230        }
4231    }
4232    pthread_mutex_unlock(&mMutex);
4233
4234    return rc;
4235}
4236
4237/*===========================================================================
4238 * FUNCTION   : dump
4239 *
4240 * DESCRIPTION:
4241 *
4242 * PARAMETERS :
4243 *
4244 *
4245 * RETURN     :
4246 *==========================================================================*/
4247void QCamera3HardwareInterface::dump(int fd)
4248{
4249    pthread_mutex_lock(&mMutex);
4250    dprintf(fd, "\n Camera HAL3 information Begin \n");
4251
4252    dprintf(fd, "\nNumber of pending requests: %zu \n",
4253        mPendingRequestsList.size());
4254    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4255    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
4256    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4257    for(pendingRequestIterator i = mPendingRequestsList.begin();
4258            i != mPendingRequestsList.end(); i++) {
4259        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4260        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4261        i->input_buffer);
4262    }
4263    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4264                mPendingBuffersMap.get_num_overall_buffers());
4265    dprintf(fd, "-------+------------------\n");
4266    dprintf(fd, " Frame | Stream type mask \n");
4267    dprintf(fd, "-------+------------------\n");
4268    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4269        for(auto &j : req.mPendingBufferList) {
4270            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4271            dprintf(fd, " %5d | %11d \n",
4272                    req.frame_number, channel->getStreamTypeMask());
4273        }
4274    }
4275    dprintf(fd, "-------+------------------\n");
4276
4277    dprintf(fd, "\nPending frame drop list: %zu\n",
4278        mPendingFrameDropList.size());
4279    dprintf(fd, "-------+-----------\n");
4280    dprintf(fd, " Frame | Stream ID \n");
4281    dprintf(fd, "-------+-----------\n");
4282    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4283        i != mPendingFrameDropList.end(); i++) {
4284        dprintf(fd, " %5d | %9d \n",
4285            i->frame_number, i->stream_ID);
4286    }
4287    dprintf(fd, "-------+-----------\n");
4288
4289    dprintf(fd, "\n Camera HAL3 information End \n");
4290
4291    /* use dumpsys media.camera as trigger to send update debug level event */
4292    mUpdateDebugLevel = true;
4293    pthread_mutex_unlock(&mMutex);
4294    return;
4295}
4296
4297/*===========================================================================
4298 * FUNCTION   : flush
4299 *
4300 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4301 *              conditionally restarts channels
4302 *
4303 * PARAMETERS :
4304 *  @ restartChannels: re-start all channels
4305 *
4306 *
4307 * RETURN     :
4308 *          0 on success
4309 *          Error code on failure
4310 *==========================================================================*/
4311int QCamera3HardwareInterface::flush(bool restartChannels)
4312{
4313    KPI_ATRACE_CALL();
4314    int32_t rc = NO_ERROR;
4315
4316    LOGD("Unblocking Process Capture Request");
4317    pthread_mutex_lock(&mMutex);
4318    mFlush = true;
4319    pthread_mutex_unlock(&mMutex);
4320
4321    rc = stopAllChannels();
4322    // unlink of dualcam
4323    if (mIsDeviceLinked) {
4324        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4325        pthread_mutex_lock(&gCamLock);
4326
4327        if (mIsMainCamera == 1) {
4328            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4329            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4330            // related session id should be session id of linked session
4331            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4332        } else {
4333            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4334            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4335            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4336        }
4337        pthread_mutex_unlock(&gCamLock);
4338
4339        rc = mCameraHandle->ops->sync_related_sensors(
4340                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4341        if (rc < 0) {
4342            LOGE("Dualcam: Unlink failed, but still proceed to close");
4343        }
4344    }
4345
4346    if (rc < 0) {
4347        LOGE("stopAllChannels failed");
4348        return rc;
4349    }
4350    if (mChannelHandle) {
4351        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4352                mChannelHandle);
4353    }
4354
4355    // Reset bundle info
4356    rc = setBundleInfo();
4357    if (rc < 0) {
4358        LOGE("setBundleInfo failed %d", rc);
4359        return rc;
4360    }
4361
4362    // Mutex Lock
4363    pthread_mutex_lock(&mMutex);
4364
4365    // Unblock process_capture_request
4366    mPendingLiveRequest = 0;
4367    pthread_cond_signal(&mRequestCond);
4368
4369    rc = notifyErrorForPendingRequests();
4370    if (rc < 0) {
4371        LOGE("notifyErrorForPendingRequests failed");
4372        pthread_mutex_unlock(&mMutex);
4373        return rc;
4374    }
4375
4376    mFlush = false;
4377
4378    // Start the Streams/Channels
4379    if (restartChannels) {
4380        rc = startAllChannels();
4381        if (rc < 0) {
4382            LOGE("startAllChannels failed");
4383            pthread_mutex_unlock(&mMutex);
4384            return rc;
4385        }
4386    }
4387
4388    if (mChannelHandle) {
4389        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4390                    mChannelHandle);
4391        if (rc < 0) {
4392            LOGE("start_channel failed");
4393            pthread_mutex_unlock(&mMutex);
4394            return rc;
4395        }
4396    }
4397
4398    pthread_mutex_unlock(&mMutex);
4399
4400    return 0;
4401}
4402
4403/*===========================================================================
4404 * FUNCTION   : flushPerf
4405 *
4406 * DESCRIPTION: This is the performance optimization version of flush that does
4407 *              not use stream off, rather flushes the system
4408 *
4409 * PARAMETERS :
4410 *
4411 *
4412 * RETURN     : 0 : success
4413 *              -EINVAL: input is malformed (device is not valid)
4414 *              -ENODEV: if the device has encountered a serious error
4415 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Everything below runs under mMutex except the waits themselves, which
    // atomically release/reacquire it inside pthread_cond_(timed)wait.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the in-flight buffer count; the buffer-return path is expected
    // to decrement it and signal mBuffersCond (assumption based on the wait
    // loop below — the producer side is outside this view).
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding: flush is trivially complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // pthread_cond_timedwait takes an absolute CLOCK_REALTIME deadline, so
    // compute it up front; fall back to an untimed wait if the clock read fails.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // Standard predicate re-check loop: spurious wakeups re-test the count.
    // pthread cond functions return the error code directly (not via errno).
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    // A wait failure (including ETIMEDOUT) means buffers never came back;
    // treat the device as unusable.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4516
4517/*===========================================================================
4518 * FUNCTION   : handleCameraDeviceError
4519 *
4520 * DESCRIPTION: This function calls internal flush and notifies the error to
4521 *              framework and updates the state variable.
4522 *
4523 * PARAMETERS : None
4524 *
4525 * RETURN     : NO_ERROR on Success
4526 *              Error code on failure
4527 *==========================================================================*/
4528int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4529{
4530    int32_t rc = NO_ERROR;
4531
4532    pthread_mutex_lock(&mMutex);
4533    if (mState != ERROR) {
4534        //if mState != ERROR, nothing to be done
4535        pthread_mutex_unlock(&mMutex);
4536        return NO_ERROR;
4537    }
4538    pthread_mutex_unlock(&mMutex);
4539
4540    rc = flush(false /* restart channels */);
4541    if (NO_ERROR != rc) {
4542        LOGE("internal flush to handle mState = ERROR failed");
4543    }
4544
4545    pthread_mutex_lock(&mMutex);
4546    mState = DEINIT;
4547    pthread_mutex_unlock(&mMutex);
4548
4549    camera3_notify_msg_t notify_msg;
4550    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4551    notify_msg.type = CAMERA3_MSG_ERROR;
4552    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4553    notify_msg.message.error.error_stream = NULL;
4554    notify_msg.message.error.frame_number = 0;
4555    mCallbackOps->notify(mCallbackOps, &notify_msg);
4556
4557    return rc;
4558}
4559
4560/*===========================================================================
4561 * FUNCTION   : captureResultCb
4562 *
4563 * DESCRIPTION: Callback handler for all capture result
4564 *              (streams, as well as metadata)
4565 *
4566 * PARAMETERS :
4567 *   @metadata : metadata information
4568 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4569 *               NULL if metadata.
4570 *
4571 * RETURN     : NONE
4572 *==========================================================================*/
4573void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4574                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4575{
4576    if (metadata_buf) {
4577        if (mBatchSize) {
4578            handleBatchMetadata(metadata_buf,
4579                    true /* free_and_bufdone_meta_buf */);
4580        } else { /* mBatchSize = 0 */
4581            hdrPlusPerfLock(metadata_buf);
4582            pthread_mutex_lock(&mMutex);
4583            handleMetadataWithLock(metadata_buf,
4584                    true /* free_and_bufdone_meta_buf */);
4585            pthread_mutex_unlock(&mMutex);
4586        }
4587    } else if (isInputBuffer) {
4588        pthread_mutex_lock(&mMutex);
4589        handleInputBufferWithLock(frame_number);
4590        pthread_mutex_unlock(&mMutex);
4591    } else {
4592        pthread_mutex_lock(&mMutex);
4593        handleBufferWithLock(buffer, frame_number);
4594        pthread_mutex_unlock(&mMutex);
4595    }
4596    return;
4597}
4598
4599/*===========================================================================
4600 * FUNCTION   : getReprocessibleOutputStreamId
4601 *
4602 * DESCRIPTION: Get source output stream id for the input reprocess stream
4603 *              based on size and format, which would be the largest
4604 *              output stream if an input stream exists.
4605 *
4606 * PARAMETERS :
4607 *   @id      : return the stream id if found
4608 *
4609 * RETURN     : int32_t type of status
4610 *              NO_ERROR  -- success
4611 *              none-zero failure code
4612 *==========================================================================*/
4613int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4614{
4615    /* check if any output or bidirectional stream with the same size and format
4616       and return that stream */
4617    if ((mInputStreamInfo.dim.width > 0) &&
4618            (mInputStreamInfo.dim.height > 0)) {
4619        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4620                it != mStreamInfo.end(); it++) {
4621
4622            camera3_stream_t *stream = (*it)->stream;
4623            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4624                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4625                    (stream->format == mInputStreamInfo.format)) {
4626                // Usage flag for an input stream and the source output stream
4627                // may be different.
4628                LOGD("Found reprocessible output stream! %p", *it);
4629                LOGD("input stream usage 0x%x, current stream usage 0x%x",
4630                         stream->usage, mInputStreamInfo.usage);
4631
4632                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4633                if (channel != NULL && channel->mStreams[0]) {
4634                    id = channel->mStreams[0]->getMyServerID();
4635                    return NO_ERROR;
4636                }
4637            }
4638        }
4639    } else {
4640        LOGD("No input stream, so no reprocessible output stream");
4641    }
4642    return NAME_NOT_FOUND;
4643}
4644
4645/*===========================================================================
4646 * FUNCTION   : lookupFwkName
4647 *
4648 * DESCRIPTION: In case the enum is not same in fwk and backend
4649 *              make sure the parameter is correctly propogated
4650 *
4651 * PARAMETERS  :
4652 *   @arr      : map between the two enums
4653 *   @len      : len of the map
4654 *   @hal_name : name of the hal_parm to map
4655 *
4656 * RETURN     : int type of status
4657 *              fwk_name  -- success
4658 *              none-zero failure code
4659 *==========================================================================*/
4660template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4661        size_t len, halType hal_name)
4662{
4663
4664    for (size_t i = 0; i < len; i++) {
4665        if (arr[i].hal_name == hal_name) {
4666            return arr[i].fwk_name;
4667        }
4668    }
4669
4670    /* Not able to find matching framework type is not necessarily
4671     * an error case. This happens when mm-camera supports more attributes
4672     * than the frameworks do */
4673    LOGH("Cannot find matching framework type");
4674    return NAME_NOT_FOUND;
4675}
4676
4677/*===========================================================================
4678 * FUNCTION   : lookupHalName
4679 *
4680 * DESCRIPTION: In case the enum is not same in fwk and backend
4681 *              make sure the parameter is correctly propogated
4682 *
4683 * PARAMETERS  :
4684 *   @arr      : map between the two enums
4685 *   @len      : len of the map
4686 *   @fwk_name : name of the hal_parm to map
4687 *
4688 * RETURN     : int32_t type of status
4689 *              hal_name  -- success
4690 *              none-zero failure code
4691 *==========================================================================*/
4692template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4693        size_t len, fwkType fwk_name)
4694{
4695    for (size_t i = 0; i < len; i++) {
4696        if (arr[i].fwk_name == fwk_name) {
4697            return arr[i].hal_name;
4698        }
4699    }
4700
4701    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4702    return NAME_NOT_FOUND;
4703}
4704
4705/*===========================================================================
4706 * FUNCTION   : lookupProp
4707 *
4708 * DESCRIPTION: lookup a value by its name
4709 *
4710 * PARAMETERS :
4711 *   @arr     : map between the two enums
4712 *   @len     : size of the map
4713 *   @name    : name to be looked up
4714 *
4715 * RETURN     : Value if found
4716 *              CAM_CDS_MODE_MAX if not found
4717 *==========================================================================*/
4718template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4719        size_t len, const char *name)
4720{
4721    if (name) {
4722        for (size_t i = 0; i < len; i++) {
4723            if (!strcmp(arr[i].desc, name)) {
4724                return arr[i].val;
4725            }
4726        }
4727    }
4728    return CAM_CDS_MODE_MAX;
4729}
4730
4731/*===========================================================================
4732 *
4733 * DESCRIPTION:
4734 *
4735 * PARAMETERS :
4736 *   @metadata : metadata information from callback
4737 *   @timestamp: metadata buffer timestamp
4738 *   @request_id: request id
4739 *   @jpegMetadata: additional jpeg metadata
4740 *   @hybrid_ae_enable: whether hybrid ae is enabled
4741 *   // DevCamDebug metadata
4742 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
4743 *   // DevCamDebug metadata end
4744 *   @pprocDone: whether internal offline postprocsesing is done
4745 *
4746 * RETURN     : camera_metadata_t*
4747 *              metadata in a format specified by fwk
4748 *==========================================================================*/
4749camera_metadata_t*
4750QCamera3HardwareInterface::translateFromHalMetadata(
4751                                 metadata_buffer_t *metadata,
4752                                 nsecs_t timestamp,
4753                                 int32_t request_id,
4754                                 const CameraMetadata& jpegMetadata,
4755                                 uint8_t pipeline_depth,
4756                                 uint8_t capture_intent,
4757                                 uint8_t hybrid_ae_enable,
4758                                 /* DevCamDebug metadata translateFromHalMetadata argument */
4759                                 uint8_t DevCamDebug_meta_enable,
4760                                 /* DevCamDebug metadata end */
4761                                 bool pprocDone,
4762                                 uint8_t fwk_cacMode)
4763{
4764    CameraMetadata camMetadata;
4765    camera_metadata_t *resultMetadata;
4766
4767    if (jpegMetadata.entryCount())
4768        camMetadata.append(jpegMetadata);
4769
4770    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4771    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4772    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4773    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4774    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4775    // DevCamDebug metadata translateFromHalMetadata
4776    camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
4777    // DevCamDebug metadata translateFromHalMetadata AF
4778    IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
4779            CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
4780        int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
4781        camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
4782    }
4783    IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
4784            CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
4785        int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
4786        camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
4787    }
4788    IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
4789            CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
4790        int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
4791        camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
4792    }
4793    IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
4794            CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
4795        int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
4796        camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
4797    }
4798    IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
4799            CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
4800        int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
4801        camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
4802    }
4803    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
4804            CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
4805        int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos = *DevCamDebug_af_monitor_pdaf_target_pos;
4806        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS, &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
4807    }
4808    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
4809            CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
4810        int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence = *DevCamDebug_af_monitor_pdaf_confidence;
4811        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE, &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
4812    }
4813    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
4814            CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
4815        int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
4816        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS, &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
4817    }
4818    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
4819            CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
4820        int32_t fwk_DevCamDebug_af_monitor_tof_target_pos = *DevCamDebug_af_monitor_tof_target_pos;
4821        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS, &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
4822    }
4823    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
4824            CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
4825        int32_t fwk_DevCamDebug_af_monitor_tof_confidence = *DevCamDebug_af_monitor_tof_confidence;
4826        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE, &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
4827    }
4828    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
4829            CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
4830        int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
4831        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS, &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
4832    }
4833    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
4834            CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
4835        int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
4836        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT, &fwk_DevCamDebug_af_monitor_type_select, 1);
4837    }
4838    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
4839            CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
4840        int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
4841        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS, &fwk_DevCamDebug_af_monitor_refocus, 1);
4842    }
4843    IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
4844            CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
4845        int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
4846        camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS, &fwk_DevCamDebug_af_monitor_target_pos, 1);
4847    }
4848    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
4849            CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
4850        int32_t fwk_DevCamDebug_af_search_pdaf_target_pos = *DevCamDebug_af_search_pdaf_target_pos;
4851        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS, &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
4852    }
4853    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
4854            CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
4855        int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
4856        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS, &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
4857    }
4858    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
4859            CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
4860        int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
4861        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS, &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
4862    }
4863    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
4864            CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
4865        int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
4866        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS, &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
4867    }
4868    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
4869            CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
4870        int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
4871        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE, &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
4872    }
4873    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
4874            CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
4875        int32_t fwk_DevCamDebug_af_search_tof_target_pos = *DevCamDebug_af_search_tof_target_pos;
4876        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS, &fwk_DevCamDebug_af_search_tof_target_pos, 1);
4877    }
4878    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
4879            CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
4880        int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
4881        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS, &fwk_DevCamDebug_af_search_tof_next_pos, 1);
4882    }
4883    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
4884            CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
4885        int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
4886        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS, &fwk_DevCamDebug_af_search_tof_near_pos, 1);
4887    }
4888    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
4889            CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
4890        int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
4891        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS, &fwk_DevCamDebug_af_search_tof_far_pos, 1);
4892    }
4893    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
4894            CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
4895        int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
4896        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE, &fwk_DevCamDebug_af_search_tof_confidence, 1);
4897    }
4898    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
4899            CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
4900        int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
4901        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT, &fwk_DevCamDebug_af_search_type_select, 1);
4902    }
4903    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
4904            CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
4905        int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
4906        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS, &fwk_DevCamDebug_af_search_next_pos, 1);
4907    }
4908    IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
4909            CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
4910        int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
4911        camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS, &fwk_DevCamDebug_af_search_target_pos, 1);
4912    }
4913    // DevCamDebug metadata translateFromHalMetadata AEC
4914    IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
4915            CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
4916        int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
4917        camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
4918    }
4919    IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
4920            CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
4921        int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
4922        camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
4923    }
4924    IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
4925            CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
4926        int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
4927        camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
4928    }
4929    IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
4930            CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
4931        int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
4932        camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
4933    }
4934    IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
4935            CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
4936        int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
4937        camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
4938    }
4939    IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
4940            CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
4941        float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
4942        camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
4943    }
4944    IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
4945            CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
4946        int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
4947        camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
4948    }
4949    IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
4950            CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
4951        float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
4952        camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
4953    }
4954    // DevCamDebug metadata translateFromHalMetadata AWB
4955    IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
4956            CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
4957        float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
4958        camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
4959    }
4960    IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
4961            CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
4962        float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
4963        camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
4964    }
4965    IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
4966            CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
4967        float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
4968        camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
4969    }
4970    IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
4971            CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
4972        int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
4973        camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
4974    }
4975    IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
4976            CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
4977        int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
4978        camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
4979    }
4980    // DevCamDebug metadata end
4981
4982
4983    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4984        int64_t fwk_frame_number = *frame_number;
4985        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4986    }
4987
4988    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4989        int32_t fps_range[2];
4990        fps_range[0] = (int32_t)float_range->min_fps;
4991        fps_range[1] = (int32_t)float_range->max_fps;
4992        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4993                                      fps_range, 2);
4994        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4995             fps_range[0], fps_range[1]);
4996    }
4997
4998    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4999        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5000    }
5001
5002    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5003        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5004                METADATA_MAP_SIZE(SCENE_MODES_MAP),
5005                *sceneMode);
5006        if (NAME_NOT_FOUND != val) {
5007            uint8_t fwkSceneMode = (uint8_t)val;
5008            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5009            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5010                     fwkSceneMode);
5011        }
5012    }
5013
5014    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5015        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5016        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5017    }
5018
5019    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5020        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5021        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5022    }
5023
5024    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5025        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5026        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5027    }
5028
5029    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5030            CAM_INTF_META_EDGE_MODE, metadata) {
5031        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5032    }
5033
5034    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5035        uint8_t fwk_flashPower = (uint8_t) *flashPower;
5036        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5037    }
5038
5039    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5040        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5041    }
5042
5043    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5044        if (0 <= *flashState) {
5045            uint8_t fwk_flashState = (uint8_t) *flashState;
5046            if (!gCamCapability[mCameraId]->flash_available) {
5047                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5048            }
5049            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5050        }
5051    }
5052
5053    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5054        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5055        if (NAME_NOT_FOUND != val) {
5056            uint8_t fwk_flashMode = (uint8_t)val;
5057            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5058        }
5059    }
5060
5061    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5062        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5063        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5064    }
5065
5066    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5067        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5068    }
5069
5070    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5071        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5072    }
5073
5074    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5075        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5076    }
5077
5078    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5079        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5080        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5081    }
5082
5083    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5084        uint8_t fwk_videoStab = (uint8_t) *videoStab;
5085        LOGD("fwk_videoStab = %d", fwk_videoStab);
5086        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5087    } else {
5088        // Regardless of whether video stabilization is supported, CTS expects
5089        // the EIS result to be non-NULL, so hardcode the result to OFF mode.
5090        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5091        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
5092        LOGD("%s: EIS result default to OFF mode", __func__);
5093    }
5094
5095    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5096        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5097        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5098    }
5099
5100    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5101        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5102    }
5103
5104    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
5105        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
5106
5107        LOGD("dynamicblackLevel = %f %f %f %f",
5108          blackLevelSourcePattern->cam_black_level[0],
5109          blackLevelSourcePattern->cam_black_level[1],
5110          blackLevelSourcePattern->cam_black_level[2],
5111          blackLevelSourcePattern->cam_black_level[3]);
5112    }
5113
5114    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5115        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5116        float fwk_blackLevelInd[4];
5117
5118        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
5119        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
5120        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
5121        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
5122
5123        LOGD("applied dynamicblackLevel = %f %f %f %f",
5124          blackLevelAppliedPattern->cam_black_level[0],
5125          blackLevelAppliedPattern->cam_black_level[1],
5126          blackLevelAppliedPattern->cam_black_level[2],
5127          blackLevelAppliedPattern->cam_black_level[3]);
5128        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
5129
5130        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
5131        // Convert the internal 16-bit depth values to the sensor's
5132        // 10-bit raw depth space.
5133        fwk_blackLevelInd[0] /= 64.0;
5134        fwk_blackLevelInd[1] /= 64.0;
5135        fwk_blackLevelInd[2] /= 64.0;
5136        fwk_blackLevelInd[3] /= 64.0;
5137        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
5138    }
5139
5140    // Fixed whitelevel is used by ISP/Sensor
5141    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5142            &gCamCapability[mCameraId]->white_level, 1);
5143
5144    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5145            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5146        int32_t scalerCropRegion[4];
5147        scalerCropRegion[0] = hScalerCropRegion->left;
5148        scalerCropRegion[1] = hScalerCropRegion->top;
5149        scalerCropRegion[2] = hScalerCropRegion->width;
5150        scalerCropRegion[3] = hScalerCropRegion->height;
5151
5152        // Adjust crop region from sensor output coordinate system to active
5153        // array coordinate system.
5154        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5155                scalerCropRegion[2], scalerCropRegion[3]);
5156
5157        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5158    }
5159
5160    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5161        LOGD("sensorExpTime = %lld", *sensorExpTime);
5162        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5163    }
5164
5165    IF_META_AVAILABLE(int64_t, sensorFameDuration,
5166            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5167        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5168        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5169    }
5170
5171    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5172            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5173        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5174        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5175                sensorRollingShutterSkew, 1);
5176    }
5177
5178    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5179        LOGD("sensorSensitivity = %d", *sensorSensitivity);
5180        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5181
5182        //calculate the noise profile based on sensitivity
5183        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5184        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5185        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5186        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5187            noise_profile[i]   = noise_profile_S;
5188            noise_profile[i+1] = noise_profile_O;
5189        }
5190        LOGD("noise model entry (S, O) is (%f, %f)",
5191                noise_profile_S, noise_profile_O);
5192        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5193                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5194    }
5195
5196    int32_t fwk_ispSensitivity = 100;
5197    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5198        fwk_ispSensitivity = (int32_t) *ispSensitivity;
5199    }
5200    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
5201        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
5202    }
5203    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5204
5205    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5206        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5207        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5208    }
5209
5210    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5211        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5212                *faceDetectMode);
5213        if (NAME_NOT_FOUND != val) {
5214            uint8_t fwk_faceDetectMode = (uint8_t)val;
5215            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5216
5217            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5218                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5219                        CAM_INTF_META_FACE_DETECTION, metadata) {
5220                    uint8_t numFaces = MIN(
5221                            faceDetectionInfo->num_faces_detected, MAX_ROI);
5222                    int32_t faceIds[MAX_ROI];
5223                    uint8_t faceScores[MAX_ROI];
5224                    int32_t faceRectangles[MAX_ROI * 4];
5225                    int32_t faceLandmarks[MAX_ROI * 6];
5226                    size_t j = 0, k = 0;
5227
5228                    for (size_t i = 0; i < numFaces; i++) {
5229                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5230                        // Map the face boundary from the sensor output coordinate
5231                        // system to the active array coordinate system.
5232                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5233                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
5234                                rect.width, rect.height);
5235
5236                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5237                                faceRectangles+j, -1);
5238
5239                        j+= 4;
5240                    }
5241                    if (numFaces <= 0) {
5242                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5243                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5244                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5245                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5246                    }
5247
5248                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5249                            numFaces);
5250                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5251                            faceRectangles, numFaces * 4U);
5252                    if (fwk_faceDetectMode ==
5253                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5254                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5255                                CAM_INTF_META_FACE_LANDMARK, metadata) {
5256
5257                            for (size_t i = 0; i < numFaces; i++) {
5258                                // Map the co-ordinate sensor output coordinate system to active
5259                                // array coordinate system.
5260                                mCropRegionMapper.toActiveArray(
5261                                        landmarks->face_landmarks[i].left_eye_center.x,
5262                                        landmarks->face_landmarks[i].left_eye_center.y);
5263                                mCropRegionMapper.toActiveArray(
5264                                        landmarks->face_landmarks[i].right_eye_center.x,
5265                                        landmarks->face_landmarks[i].right_eye_center.y);
5266                                mCropRegionMapper.toActiveArray(
5267                                        landmarks->face_landmarks[i].mouth_center.x,
5268                                        landmarks->face_landmarks[i].mouth_center.y);
5269
5270                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
5271                                k+= 6;
5272                            }
5273                        }
5274
5275                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5276                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5277                                faceLandmarks, numFaces * 6U);
5278                   }
5279                }
5280            }
5281        }
5282    }
5283
5284    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5285        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5286        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
5287    }
5288
5289    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5290            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5291        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5292        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5293    }
5294
5295    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5296            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5297        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5298                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5299    }
5300
5301    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5302            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5303        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5304                CAM_MAX_SHADING_MAP_HEIGHT);
5305        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5306                CAM_MAX_SHADING_MAP_WIDTH);
5307        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5308                lensShadingMap->lens_shading, 4U * map_width * map_height);
5309    }
5310
5311    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5312        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5313        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5314    }
5315
5316    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5317        // Translate CAM_INTF_META_TONEMAP_CURVES into the framework tonemap curves
5318        /* ch0 = G, ch 1 = B, ch 2 = R*/
5319        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5320            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5321                     tonemap->tonemap_points_cnt,
5322                    CAM_MAX_TONEMAP_CURVE_SIZE);
5323            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5324        }
5325
5326        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5327                        &tonemap->curves[0].tonemap_points[0][0],
5328                        tonemap->tonemap_points_cnt * 2);
5329
5330        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5331                        &tonemap->curves[1].tonemap_points[0][0],
5332                        tonemap->tonemap_points_cnt * 2);
5333
5334        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5335                        &tonemap->curves[2].tonemap_points[0][0],
5336                        tonemap->tonemap_points_cnt * 2);
5337    }
5338
5339    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5340            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5341        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5342                CC_GAINS_COUNT);
5343    }
5344
5345    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5346            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5347        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5348                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5349                CC_MATRIX_COLS * CC_MATRIX_ROWS);
5350    }
5351
5352    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5353            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5354        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5355            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5356                     toneCurve->tonemap_points_cnt,
5357                    CAM_MAX_TONEMAP_CURVE_SIZE);
5358            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5359        }
5360        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5361                (float*)toneCurve->curve.tonemap_points,
5362                toneCurve->tonemap_points_cnt * 2);
5363    }
5364
5365    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5366            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5367        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5368                predColorCorrectionGains->gains, 4);
5369    }
5370
5371    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5372            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5373        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5374                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5375                CC_MATRIX_ROWS * CC_MATRIX_COLS);
5376    }
5377
5378    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5379        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5380    }
5381
5382    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5383        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5384        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5385    }
5386
5387    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5388        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5389        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5390    }
5391
5392    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5393        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5394                *effectMode);
5395        if (NAME_NOT_FOUND != val) {
5396            uint8_t fwk_effectMode = (uint8_t)val;
5397            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5398        }
5399    }
5400
5401    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5402            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5403        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5404                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5405        if (NAME_NOT_FOUND != fwk_testPatternMode) {
5406            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5407        }
5408        int32_t fwk_testPatternData[4];
5409        fwk_testPatternData[0] = testPatternData->r;
5410        fwk_testPatternData[3] = testPatternData->b;
5411        switch (gCamCapability[mCameraId]->color_arrangement) {
5412        case CAM_FILTER_ARRANGEMENT_RGGB:
5413        case CAM_FILTER_ARRANGEMENT_GRBG:
5414            fwk_testPatternData[1] = testPatternData->gr;
5415            fwk_testPatternData[2] = testPatternData->gb;
5416            break;
5417        case CAM_FILTER_ARRANGEMENT_GBRG:
5418        case CAM_FILTER_ARRANGEMENT_BGGR:
5419            fwk_testPatternData[2] = testPatternData->gr;
5420            fwk_testPatternData[1] = testPatternData->gb;
5421            break;
5422        default:
5423            LOGE("color arrangement %d is not supported",
5424                gCamCapability[mCameraId]->color_arrangement);
5425            break;
5426        }
5427        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5428    }
5429
5430    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5431        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5432    }
5433
5434    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5435        String8 str((const char *)gps_methods);
5436        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5437    }
5438
5439    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5440        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5441    }
5442
5443    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5444        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5445    }
5446
5447    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5448        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5449        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5450    }
5451
5452    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5453        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5454        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5455    }
5456
5457    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5458        int32_t fwk_thumb_size[2];
5459        fwk_thumb_size[0] = thumb_size->width;
5460        fwk_thumb_size[1] = thumb_size->height;
5461        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5462    }
5463
5464    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5465        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5466                privateData,
5467                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5468    }
5469
5470    if (metadata->is_tuning_params_valid) {
5471        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5472        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5473        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5474
5475
5476        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5477                sizeof(uint32_t));
5478        data += sizeof(uint32_t);
5479
5480        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5481                sizeof(uint32_t));
5482        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5483        data += sizeof(uint32_t);
5484
5485        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5486                sizeof(uint32_t));
5487        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5488        data += sizeof(uint32_t);
5489
5490        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5491                sizeof(uint32_t));
5492        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5493        data += sizeof(uint32_t);
5494
5495        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5496                sizeof(uint32_t));
5497        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5498        data += sizeof(uint32_t);
5499
5500        metadata->tuning_params.tuning_mod3_data_size = 0;
5501        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5502                sizeof(uint32_t));
5503        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5504        data += sizeof(uint32_t);
5505
5506        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5507                TUNING_SENSOR_DATA_MAX);
5508        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5509                count);
5510        data += count;
5511
5512        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5513                TUNING_VFE_DATA_MAX);
5514        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5515                count);
5516        data += count;
5517
5518        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5519                TUNING_CPP_DATA_MAX);
5520        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5521                count);
5522        data += count;
5523
5524        count = MIN(metadata->tuning_params.tuning_cac_data_size,
5525                TUNING_CAC_DATA_MAX);
5526        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5527                count);
5528        data += count;
5529
5530        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5531                (int32_t *)(void *)tuning_meta_data_blob,
5532                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5533    }
5534
5535    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5536            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5537        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5538                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5539                NEUTRAL_COL_POINTS);
5540    }
5541
5542    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5543        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5544        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5545    }
5546
5547    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5548        int32_t aeRegions[REGIONS_TUPLE_COUNT];
5549        // Map the AE region from the sensor output coordinate system to the
5550        // active array coordinate system.
5551        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5552                hAeRegions->rect.width, hAeRegions->rect.height);
5553
5554        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5555        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5556                REGIONS_TUPLE_COUNT);
5557        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5558                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5559                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5560                hAeRegions->rect.height);
5561    }
5562
5563    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5564        uint8_t fwk_afState = (uint8_t) *afState;
5565        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5566        LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5567    }
5568
5569    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5570        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5571    }
5572
5573    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5574        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5575    }
5576
5577    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5578        uint8_t fwk_lensState = *lensState;
5579        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5580    }
5581
5582    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5583        /*af regions*/
5584        int32_t afRegions[REGIONS_TUPLE_COUNT];
5585        // Map the AF region from the sensor output coordinate system to the
5586        // active array coordinate system.
5587        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5588                hAfRegions->rect.width, hAfRegions->rect.height);
5589
5590        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5591        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5592                REGIONS_TUPLE_COUNT);
5593        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5594                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5595                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5596                hAfRegions->rect.height);
5597    }
5598
5599    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5600        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5601                *hal_ab_mode);
5602        if (NAME_NOT_FOUND != val) {
5603            uint8_t fwk_ab_mode = (uint8_t)val;
5604            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5605        }
5606    }
5607
5608    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5609        int val = lookupFwkName(SCENE_MODES_MAP,
5610                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5611        if (NAME_NOT_FOUND != val) {
5612            uint8_t fwkBestshotMode = (uint8_t)val;
5613            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5614            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5615        } else {
5616            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5617        }
5618    }
5619
5620    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5621         uint8_t fwk_mode = (uint8_t) *mode;
5622         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5623    }
5624
5625    /* Constant metadata values to be updated */
5626    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5627    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5628
5629    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5630    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5631
5632    int32_t hotPixelMap[2];
5633    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5634
5635    // CDS
5636    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5637        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5638    }
5639
5640    // TNR
5641    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5642        uint8_t tnr_enable       = tnr->denoise_enable;
5643        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5644
5645        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5646        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5647    }
5648
5649    // Reprocess crop data
5650    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5651        uint8_t cnt = crop_data->num_of_streams;
5652        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5653            // mm-qcamera-daemon only posts crop_data for streams
5654            // not linked to pproc. So no valid crop metadata is not
5655            // necessarily an error case.
5656            LOGD("No valid crop metadata entries");
5657        } else {
5658            uint32_t reproc_stream_id;
5659            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5660                LOGD("No reprocessible stream found, ignore crop data");
5661            } else {
5662                int rc = NO_ERROR;
5663                Vector<int32_t> roi_map;
5664                int32_t *crop = new int32_t[cnt*4];
5665                if (NULL == crop) {
5666                   rc = NO_MEMORY;
5667                }
5668                if (NO_ERROR == rc) {
5669                    int32_t streams_found = 0;
5670                    for (size_t i = 0; i < cnt; i++) {
5671                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5672                            if (pprocDone) {
5673                                // HAL already does internal reprocessing,
5674                                // either via reprocessing before JPEG encoding,
5675                                // or offline postprocessing for pproc bypass case.
5676                                crop[0] = 0;
5677                                crop[1] = 0;
5678                                crop[2] = mInputStreamInfo.dim.width;
5679                                crop[3] = mInputStreamInfo.dim.height;
5680                            } else {
5681                                crop[0] = crop_data->crop_info[i].crop.left;
5682                                crop[1] = crop_data->crop_info[i].crop.top;
5683                                crop[2] = crop_data->crop_info[i].crop.width;
5684                                crop[3] = crop_data->crop_info[i].crop.height;
5685                            }
5686                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5687                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5688                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5689                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5690                            streams_found++;
5691                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5692                                    crop[0], crop[1], crop[2], crop[3]);
5693                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5694                                    crop_data->crop_info[i].roi_map.left,
5695                                    crop_data->crop_info[i].roi_map.top,
5696                                    crop_data->crop_info[i].roi_map.width,
5697                                    crop_data->crop_info[i].roi_map.height);
5698                            break;
5699
5700                       }
5701                    }
5702                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5703                            &streams_found, 1);
5704                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5705                            crop, (size_t)(streams_found * 4));
5706                    if (roi_map.array()) {
5707                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5708                                roi_map.array(), roi_map.size());
5709                    }
5710               }
5711               if (crop) {
5712                   delete [] crop;
5713               }
5714            }
5715        }
5716    }
5717
5718    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5719        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5720        // so hardcoding the CAC result to OFF mode.
5721        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5722        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5723    } else {
5724        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5725            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5726                    *cacMode);
5727            if (NAME_NOT_FOUND != val) {
5728                uint8_t resultCacMode = (uint8_t)val;
5729                // check whether CAC result from CB is equal to Framework set CAC mode
5730                // If not equal then set the CAC mode came in corresponding request
5731                if (fwk_cacMode != resultCacMode) {
5732                    resultCacMode = fwk_cacMode;
5733                }
5734                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5735                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5736            } else {
5737                LOGE("Invalid CAC camera parameter: %d", *cacMode);
5738            }
5739        }
5740    }
5741
5742    // Post blob of cam_cds_data through vendor tag.
5743    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5744        uint8_t cnt = cdsInfo->num_of_streams;
5745        cam_cds_data_t cdsDataOverride;
5746        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5747        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5748        cdsDataOverride.num_of_streams = 1;
5749        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5750            uint32_t reproc_stream_id;
5751            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5752                LOGD("No reprocessible stream found, ignore cds data");
5753            } else {
5754                for (size_t i = 0; i < cnt; i++) {
5755                    if (cdsInfo->cds_info[i].stream_id ==
5756                            reproc_stream_id) {
5757                        cdsDataOverride.cds_info[0].cds_enable =
5758                                cdsInfo->cds_info[i].cds_enable;
5759                        break;
5760                    }
5761                }
5762            }
5763        } else {
5764            LOGD("Invalid stream count %d in CDS_DATA", cnt);
5765        }
5766        camMetadata.update(QCAMERA3_CDS_INFO,
5767                (uint8_t *)&cdsDataOverride,
5768                sizeof(cam_cds_data_t));
5769    }
5770
5771    // Ldaf calibration data
5772    if (!mLdafCalibExist) {
5773        IF_META_AVAILABLE(uint32_t, ldafCalib,
5774                CAM_INTF_META_LDAF_EXIF, metadata) {
5775            mLdafCalibExist = true;
5776            mLdafCalib[0] = ldafCalib[0];
5777            mLdafCalib[1] = ldafCalib[1];
5778            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5779                    ldafCalib[0], ldafCalib[1]);
5780        }
5781    }
5782
5783    // AF scene change
5784    IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
5785        camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
5786    }
5787
5788    resultMetadata = camMetadata.release();
5789    return resultMetadata;
5790}
5791
5792/*===========================================================================
5793 * FUNCTION   : saveExifParams
5794 *
 * DESCRIPTION: Cache 3A/stats EXIF debug parameters (AE, AWB, AF, ASD,
 *              stats, BE stats, BHIST, 3A tuning) from the metadata
 *              callback into mExifParams for later JPEG encoding.
5796 *
5797 * PARAMETERS :
5798 *   @metadata : metadata information from callback
5799 *
5800 * RETURN     : none
5801 *
5802 *==========================================================================*/
5803void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5804{
5805    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5806            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5807        if (mExifParams.debug_params) {
5808            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
5809            mExifParams.debug_params->ae_debug_params_valid = TRUE;
5810        }
5811    }
5812    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5813            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5814        if (mExifParams.debug_params) {
5815            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
5816            mExifParams.debug_params->awb_debug_params_valid = TRUE;
5817        }
5818    }
5819    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5820            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5821        if (mExifParams.debug_params) {
5822            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
5823            mExifParams.debug_params->af_debug_params_valid = TRUE;
5824        }
5825    }
5826    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5827            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5828        if (mExifParams.debug_params) {
5829            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
5830            mExifParams.debug_params->asd_debug_params_valid = TRUE;
5831        }
5832    }
5833    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5834            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5835        if (mExifParams.debug_params) {
5836            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
5837            mExifParams.debug_params->stats_debug_params_valid = TRUE;
5838        }
5839    }
5840    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
5841            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
5842        if (mExifParams.debug_params) {
5843            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
5844            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
5845        }
5846    }
5847    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
5848            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
5849        if (mExifParams.debug_params) {
5850            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
5851            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
5852        }
5853    }
5854    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
5855            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
5856        if (mExifParams.debug_params) {
5857            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
5858            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
5859        }
5860    }
5861}
5862
5863/*===========================================================================
5864 * FUNCTION   : get3AExifParams
5865 *
 * DESCRIPTION: Accessor for the cached 3A EXIF parameters saved by
 *              saveExifParams().
5867 *
5868 * PARAMETERS : none
5869 *
5870 *
5871 * RETURN     : mm_jpeg_exif_params_t
5872 *
5873 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return by value: the caller receives a shallow copy of the cached
    // EXIF parameters — the debug_params pointer inside the struct is
    // shared, not duplicated (see saveExifParams, which writes through it).
    return mExifParams;
}
5878
5879/*===========================================================================
5880 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5881 *
 * DESCRIPTION: Translate the partial (urgent) 3A metadata from the HAL
 *              callback into framework result metadata (AWB/AE/AF state,
 *              triggers, and the deduced AE mode).
5883 *
5884 * PARAMETERS :
5885 *   @metadata : metadata information from callback
5886 *
5887 * RETURN     : camera_metadata_t*
5888 *              metadata in a format specified by fwk
5889 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state machine value, straight-cast into the framework enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are reported back verbatim.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state machine value, straight-cast into the framework enum.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Focus mode: translated HAL -> framework through FOCUS_MODES_MAP.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger and its id, reported back verbatim.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // White balance mode: translated through WHITE_BALANCE_MODES_MAP.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three HAL settings. Precedence
    // below: redeye reduction > auto/on flash > plain AE on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        // Flash auto/on translates through AE_FLASH_MODE_MAP.
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the inputs were usable: AE mode is left out of the result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Transfer ownership of the packed metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5996
5997/*===========================================================================
5998 * FUNCTION   : dumpMetadataToFile
5999 *
6000 * DESCRIPTION: Dumps tuning metadata to file system
6001 *
6002 * PARAMETERS :
6003 *   @meta           : tuning metadata
6004 *   @dumpFrameCount : current dump frame count
6005 *   @enabled        : Enable mask
6006 *
6007 *==========================================================================*/
6008void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6009                                                   uint32_t &dumpFrameCount,
6010                                                   bool enabled,
6011                                                   const char *type,
6012                                                   uint32_t frameNumber)
6013{
6014    //Some sanity checks
6015    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6016        LOGE("Tuning sensor data size bigger than expected %d: %d",
6017              meta.tuning_sensor_data_size,
6018              TUNING_SENSOR_DATA_MAX);
6019        return;
6020    }
6021
6022    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6023        LOGE("Tuning VFE data size bigger than expected %d: %d",
6024              meta.tuning_vfe_data_size,
6025              TUNING_VFE_DATA_MAX);
6026        return;
6027    }
6028
6029    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6030        LOGE("Tuning CPP data size bigger than expected %d: %d",
6031              meta.tuning_cpp_data_size,
6032              TUNING_CPP_DATA_MAX);
6033        return;
6034    }
6035
6036    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6037        LOGE("Tuning CAC data size bigger than expected %d: %d",
6038              meta.tuning_cac_data_size,
6039              TUNING_CAC_DATA_MAX);
6040        return;
6041    }
6042    //
6043
6044    if(enabled){
6045        char timeBuf[FILENAME_MAX];
6046        char buf[FILENAME_MAX];
6047        memset(buf, 0, sizeof(buf));
6048        memset(timeBuf, 0, sizeof(timeBuf));
6049        time_t current_time;
6050        struct tm * timeinfo;
6051        time (&current_time);
6052        timeinfo = localtime (&current_time);
6053        if (timeinfo != NULL) {
6054            /* Consistent naming for Jpeg+meta+raw: meta name */
6055            strftime (timeBuf, sizeof(timeBuf),
6056                    QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
6057            /* Consistent naming for Jpeg+meta+raw: meta name end*/
6058        }
6059        String8 filePath(timeBuf);
6060         /* Consistent naming for Jpeg+meta+raw */
6061        snprintf(buf,
6062                sizeof(buf),
6063                "%dm_%s_%d.bin",
6064                dumpFrameCount,
6065                type,
6066                frameNumber);
6067         /* Consistent naming for Jpeg+meta+raw end */
6068        filePath.append(buf);
6069        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6070        if (file_fd >= 0) {
6071            ssize_t written_len = 0;
6072            meta.tuning_data_version = TUNING_DATA_VERSION;
6073            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6074            written_len += write(file_fd, data, sizeof(uint32_t));
6075            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6076            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6077            written_len += write(file_fd, data, sizeof(uint32_t));
6078            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6079            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6080            written_len += write(file_fd, data, sizeof(uint32_t));
6081            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6082            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6083            written_len += write(file_fd, data, sizeof(uint32_t));
6084            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6085            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6086            written_len += write(file_fd, data, sizeof(uint32_t));
6087            meta.tuning_mod3_data_size = 0;
6088            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6089            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6090            written_len += write(file_fd, data, sizeof(uint32_t));
6091            size_t total_size = meta.tuning_sensor_data_size;
6092            data = (void *)((uint8_t *)&meta.data);
6093            written_len += write(file_fd, data, total_size);
6094            total_size = meta.tuning_vfe_data_size;
6095            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6096            written_len += write(file_fd, data, total_size);
6097            total_size = meta.tuning_cpp_data_size;
6098            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6099            written_len += write(file_fd, data, total_size);
6100            total_size = meta.tuning_cac_data_size;
6101            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6102            written_len += write(file_fd, data, total_size);
6103            close(file_fd);
6104        }else {
6105            LOGE("fail to open file for metadata dumping");
6106        }
6107    }
6108}
6109
6110/*===========================================================================
6111 * FUNCTION   : cleanAndSortStreamInfo
6112 *
6113 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6114 *              and sort them such that raw stream is at the end of the list
6115 *              This is a workaround for camera daemon constraint.
6116 *
6117 * PARAMETERS : None
6118 *
6119 *==========================================================================*/
6120void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6121{
6122    List<stream_info_t *> newStreamInfo;
6123
6124    /*clean up invalid streams*/
6125    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6126            it != mStreamInfo.end();) {
6127        if(((*it)->status) == INVALID){
6128            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6129            delete channel;
6130            free(*it);
6131            it = mStreamInfo.erase(it);
6132        } else {
6133            it++;
6134        }
6135    }
6136
6137    // Move preview/video/callback/snapshot streams into newList
6138    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6139            it != mStreamInfo.end();) {
6140        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6141                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6142                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6143            newStreamInfo.push_back(*it);
6144            it = mStreamInfo.erase(it);
6145        } else
6146            it++;
6147    }
6148    // Move raw streams into newList
6149    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6150            it != mStreamInfo.end();) {
6151        newStreamInfo.push_back(*it);
6152        it = mStreamInfo.erase(it);
6153    }
6154
6155    mStreamInfo = newStreamInfo;
6156}
6157
6158/*===========================================================================
6159 * FUNCTION   : extractJpegMetadata
6160 *
6161 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6162 *              JPEG metadata is cached in HAL, and return as part of capture
6163 *              result when metadata is returned from camera daemon.
6164 *
6165 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6166 *              @request:      capture request
6167 *
6168 *==========================================================================*/
6169void QCamera3HardwareInterface::extractJpegMetadata(
6170        CameraMetadata& jpegMetadata,
6171        const camera3_capture_request_t *request)
6172{
6173    CameraMetadata frame_settings;
6174    frame_settings = request->settings;
6175
6176    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6177        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6178                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6179                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6180
6181    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6182        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6183                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6184                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6185
6186    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6187        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6188                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6189                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6190
6191    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6192        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6193                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6194                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6195
6196    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6197        jpegMetadata.update(ANDROID_JPEG_QUALITY,
6198                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6199                frame_settings.find(ANDROID_JPEG_QUALITY).count);
6200
6201    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6202        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6203                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6204                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6205
6206    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6207        int32_t thumbnail_size[2];
6208        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6209        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6210        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6211            int32_t orientation =
6212                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
6213            if ((orientation == 90) || (orientation == 270)) {
6214               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6215               int32_t temp;
6216               temp = thumbnail_size[0];
6217               thumbnail_size[0] = thumbnail_size[1];
6218               thumbnail_size[1] = temp;
6219            }
6220         }
6221         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6222                thumbnail_size,
6223                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6224    }
6225
6226}
6227
6228/*===========================================================================
6229 * FUNCTION   : convertToRegions
6230 *
6231 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6232 *
6233 * PARAMETERS :
6234 *   @rect   : cam_rect_t struct to convert
6235 *   @region : int32_t destination array
6236 *   @weight : if we are converting from cam_area_t, weight is valid
6237 *             else weight = -1
6238 *
6239 *==========================================================================*/
6240void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6241        int32_t *region, int weight)
6242{
6243    region[0] = rect.left;
6244    region[1] = rect.top;
6245    region[2] = rect.left + rect.width;
6246    region[3] = rect.top + rect.height;
6247    if (weight > -1) {
6248        region[4] = weight;
6249    }
6250}
6251
6252/*===========================================================================
6253 * FUNCTION   : convertFromRegions
6254 *
 * DESCRIPTION: helper method to convert a framework region entry of the
 *              form [xmin, ymin, xmax, ymax, weight] into a cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill
 *   @settings : capture settings containing the region entry
 *   @tag      : metadata tag identifying which region array to read
6262 *
6263 *==========================================================================*/
6264void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6265        const camera_metadata_t *settings, uint32_t tag)
6266{
6267    CameraMetadata frame_settings;
6268    frame_settings = settings;
6269    int32_t x_min = frame_settings.find(tag).data.i32[0];
6270    int32_t y_min = frame_settings.find(tag).data.i32[1];
6271    int32_t x_max = frame_settings.find(tag).data.i32[2];
6272    int32_t y_max = frame_settings.find(tag).data.i32[3];
6273    roi.weight = frame_settings.find(tag).data.i32[4];
6274    roi.rect.left = x_min;
6275    roi.rect.top = y_min;
6276    roi.rect.width = x_max - x_min;
6277    roi.rect.height = y_max - y_min;
6278}
6279
6280/*===========================================================================
6281 * FUNCTION   : resetIfNeededROI
6282 *
6283 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6284 *              crop region
6285 *
6286 * PARAMETERS :
6287 *   @roi       : cam_area_t struct to resize
6288 *   @scalerCropRegion : cam_crop_region_t region to compare against
6289 *
6290 *
6291 *==========================================================================*/
6292bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6293                                                 const cam_crop_region_t* scalerCropRegion)
6294{
6295    int32_t roi_x_max = roi->rect.width + roi->rect.left;
6296    int32_t roi_y_max = roi->rect.height + roi->rect.top;
6297    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6298    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6299
6300    /* According to spec weight = 0 is used to indicate roi needs to be disabled
6301     * without having this check the calculations below to validate if the roi
6302     * is inside scalar crop region will fail resulting in the roi not being
6303     * reset causing algorithm to continue to use stale roi window
6304     */
6305    if (roi->weight == 0) {
6306        return true;
6307    }
6308
6309    if ((roi_x_max < scalerCropRegion->left) ||
6310        // right edge of roi window is left of scalar crop's left edge
6311        (roi_y_max < scalerCropRegion->top)  ||
6312        // bottom edge of roi window is above scalar crop's top edge
6313        (roi->rect.left > crop_x_max) ||
6314        // left edge of roi window is beyond(right) of scalar crop's right edge
6315        (roi->rect.top > crop_y_max)){
6316        // top edge of roi windo is above scalar crop's top edge
6317        return false;
6318    }
6319    if (roi->rect.left < scalerCropRegion->left) {
6320        roi->rect.left = scalerCropRegion->left;
6321    }
6322    if (roi->rect.top < scalerCropRegion->top) {
6323        roi->rect.top = scalerCropRegion->top;
6324    }
6325    if (roi_x_max > crop_x_max) {
6326        roi_x_max = crop_x_max;
6327    }
6328    if (roi_y_max > crop_y_max) {
6329        roi_y_max = crop_y_max;
6330    }
6331    roi->rect.width = roi_x_max - roi->rect.left;
6332    roi->rect.height = roi_y_max - roi->rect.top;
6333    return true;
6334}
6335
6336/*===========================================================================
6337 * FUNCTION   : convertLandmarks
6338 *
6339 * DESCRIPTION: helper method to extract the landmarks from face detection info
6340 *
6341 * PARAMETERS :
6342 *   @landmark_data : input landmark data to be converted
6343 *   @landmarks : int32_t destination array
6344 *
6345 *
6346 *==========================================================================*/
6347void QCamera3HardwareInterface::convertLandmarks(
6348        cam_face_landmarks_info_t landmark_data,
6349        int32_t *landmarks)
6350{
6351    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
6352    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
6353    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
6354    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
6355    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
6356    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
6357}
6358
6359#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6360/*===========================================================================
6361 * FUNCTION   : initCapabilities
6362 *
6363 * DESCRIPTION: initialize camera capabilities in static data struct
6364 *
6365 * PARAMETERS :
6366 *   @cameraId  : camera Id
6367 *
6368 * RETURN     : int32_t type of status
6369 *              NO_ERROR  -- success
6370 *              none-zero failure code
6371 *==========================================================================*/
6372int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6373{
6374    int rc = 0;
6375    mm_camera_vtbl_t *cameraHandle = NULL;
6376    QCamera3HeapMemory *capabilityHeap = NULL;
6377
6378    rc = camera_open((uint8_t)cameraId, &cameraHandle);
6379    if (rc) {
6380        LOGE("camera_open failed. rc = %d", rc);
6381        goto open_failed;
6382    }
6383    if (!cameraHandle) {
6384        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6385        goto open_failed;
6386    }
6387
6388    capabilityHeap = new QCamera3HeapMemory(1);
6389    if (capabilityHeap == NULL) {
6390        LOGE("creation of capabilityHeap failed");
6391        goto heap_creation_failed;
6392    }
6393    /* Allocate memory for capability buffer */
6394    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6395    if(rc != OK) {
6396        LOGE("No memory for cappability");
6397        goto allocate_failed;
6398    }
6399
6400    /* Map memory for capability buffer */
6401    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6402    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6403                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
6404                                capabilityHeap->getFd(0),
6405                                sizeof(cam_capability_t),
6406                                capabilityHeap->getPtr(0));
6407    if(rc < 0) {
6408        LOGE("failed to map capability buffer");
6409        goto map_failed;
6410    }
6411
6412    /* Query Capability */
6413    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6414    if(rc < 0) {
6415        LOGE("failed to query capability");
6416        goto query_failed;
6417    }
6418    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6419    if (!gCamCapability[cameraId]) {
6420        LOGE("out of memory");
6421        goto query_failed;
6422    }
6423    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6424                                        sizeof(cam_capability_t));
6425
6426    int index;
6427    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6428        cam_analysis_info_t *p_analysis_info =
6429                &gCamCapability[cameraId]->analysis_info[index];
6430        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6431        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6432    }
6433    rc = 0;
6434
6435query_failed:
6436    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6437                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
6438map_failed:
6439    capabilityHeap->deallocate();
6440allocate_failed:
6441    delete capabilityHeap;
6442heap_creation_failed:
6443    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6444    cameraHandle = NULL;
6445open_failed:
6446    return rc;
6447}
6448
6449/*==========================================================================
 * FUNCTION   : get3AVersion
6451 *
6452 * DESCRIPTION: get the Q3A S/W version
6453 *
6454 * PARAMETERS :
6455 *  @sw_version: Reference of Q3A structure which will hold version info upon
6456 *               return
6457 *
6458 * RETURN     : None
6459 *
6460 *==========================================================================*/
6461void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6462{
6463    if(gCamCapability[mCameraId])
6464        sw_version = gCamCapability[mCameraId]->q3a_version;
6465    else
6466        LOGE("Capability structure NULL!");
6467}
6468
6469
6470/*===========================================================================
6471 * FUNCTION   : initParameters
6472 *
6473 * DESCRIPTION: initialize camera parameters
6474 *
6475 * PARAMETERS :
6476 *
6477 * RETURN     : int32_t type of status
6478 *              NO_ERROR  -- success
6479 *              none-zero failure code
6480 *==========================================================================*/
6481int QCamera3HardwareInterface::initParameters()
6482{
6483    int rc = 0;
6484
6485    //Allocate Set Param Buffer
6486    mParamHeap = new QCamera3HeapMemory(1);
6487    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6488    if(rc != OK) {
6489        rc = NO_MEMORY;
6490        LOGE("Failed to allocate SETPARM Heap memory");
6491        delete mParamHeap;
6492        mParamHeap = NULL;
6493        return rc;
6494    }
6495
6496    //Map memory for parameters buffer
6497    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6498            CAM_MAPPING_BUF_TYPE_PARM_BUF,
6499            mParamHeap->getFd(0),
6500            sizeof(metadata_buffer_t),
6501            (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6502    if(rc < 0) {
6503        LOGE("failed to map SETPARM buffer");
6504        rc = FAILED_TRANSACTION;
6505        mParamHeap->deallocate();
6506        delete mParamHeap;
6507        mParamHeap = NULL;
6508        return rc;
6509    }
6510
6511    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6512
6513    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6514    return rc;
6515}
6516
6517/*===========================================================================
6518 * FUNCTION   : deinitParameters
6519 *
6520 * DESCRIPTION: de-initialize camera parameters
6521 *
6522 * PARAMETERS :
6523 *
6524 * RETURN     : NONE
6525 *==========================================================================*/
6526void QCamera3HardwareInterface::deinitParameters()
6527{
6528    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6529            CAM_MAPPING_BUF_TYPE_PARM_BUF);
6530
6531    mParamHeap->deallocate();
6532    delete mParamHeap;
6533    mParamHeap = NULL;
6534
6535    mParameters = NULL;
6536
6537    free(mPrevParameters);
6538    mPrevParameters = NULL;
6539}
6540
6541/*===========================================================================
6542 * FUNCTION   : calcMaxJpegSize
6543 *
6544 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6545 *
6546 * PARAMETERS :
6547 *
6548 * RETURN     : max_jpeg_size
6549 *==========================================================================*/
6550size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6551{
6552    size_t max_jpeg_size = 0;
6553    size_t temp_width, temp_height;
6554    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6555            MAX_SIZES_CNT);
6556    for (size_t i = 0; i < count; i++) {
6557        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6558        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6559        if (temp_width * temp_height > max_jpeg_size ) {
6560            max_jpeg_size = temp_width * temp_height;
6561        }
6562    }
6563    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6564    return max_jpeg_size;
6565}
6566
6567/*===========================================================================
6568 * FUNCTION   : getMaxRawSize
6569 *
6570 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6571 *
6572 * PARAMETERS :
6573 *
6574 * RETURN     : Largest supported Raw Dimension
6575 *==========================================================================*/
6576cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6577{
6578    int max_width = 0;
6579    cam_dimension_t maxRawSize;
6580
6581    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6582    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6583        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6584            max_width = gCamCapability[camera_id]->raw_dim[i].width;
6585            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6586        }
6587    }
6588    return maxRawSize;
6589}
6590
6591
6592/*===========================================================================
6593 * FUNCTION   : calcMaxJpegDim
6594 *
6595 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6596 *
6597 * PARAMETERS :
6598 *
6599 * RETURN     : max_jpeg_dim
6600 *==========================================================================*/
6601cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6602{
6603    cam_dimension_t max_jpeg_dim;
6604    cam_dimension_t curr_jpeg_dim;
6605    max_jpeg_dim.width = 0;
6606    max_jpeg_dim.height = 0;
6607    curr_jpeg_dim.width = 0;
6608    curr_jpeg_dim.height = 0;
6609    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6610        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6611        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6612        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6613            max_jpeg_dim.width * max_jpeg_dim.height ) {
6614            max_jpeg_dim.width = curr_jpeg_dim.width;
6615            max_jpeg_dim.height = curr_jpeg_dim.height;
6616        }
6617    }
6618    return max_jpeg_dim;
6619}
6620
6621/*===========================================================================
6622 * FUNCTION   : addStreamConfig
6623 *
6624 * DESCRIPTION: adds the stream configuration to the array
6625 *
6626 * PARAMETERS :
6627 * @available_stream_configs : pointer to stream configuration array
6628 * @scalar_format            : scalar format
6629 * @dim                      : configuration dimension
6630 * @config_type              : input or output configuration type
6631 *
6632 * RETURN     : NONE
6633 *==========================================================================*/
6634void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6635        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6636{
6637    available_stream_configs.add(scalar_format);
6638    available_stream_configs.add(dim.width);
6639    available_stream_configs.add(dim.height);
6640    available_stream_configs.add(config_type);
6641}
6642
6643/*===========================================================================
 * FUNCTION   : supportBurstCapture
6645 *
6646 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6647 *
6648 * PARAMETERS :
6649 *   @cameraId  : camera Id
6650 *
6651 * RETURN     : true if camera supports BURST_CAPTURE
6652 *              false otherwise
6653 *==========================================================================*/
6654bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6655{
6656    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6657    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6658    const int32_t highResWidth = 3264;
6659    const int32_t highResHeight = 2448;
6660
6661    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6662        // Maximum resolution images cannot be captured at >= 10fps
6663        // -> not supporting BURST_CAPTURE
6664        return false;
6665    }
6666
6667    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6668        // Maximum resolution images can be captured at >= 20fps
6669        // --> supporting BURST_CAPTURE
6670        return true;
6671    }
6672
6673    // Find the smallest highRes resolution, or largest resolution if there is none
6674    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6675            MAX_SIZES_CNT);
6676    size_t highRes = 0;
6677    while ((highRes + 1 < totalCnt) &&
6678            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6679            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6680            highResWidth * highResHeight)) {
6681        highRes++;
6682    }
6683    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6684        return true;
6685    } else {
6686        return false;
6687    }
6688}
6689
6690/*===========================================================================
6691 * FUNCTION   : initStaticMetadata
6692 *
6693 * DESCRIPTION: initialize the static metadata
6694 *
6695 * PARAMETERS :
6696 *   @cameraId  : camera Id
6697 *
6698 * RETURN     : int32_t type of status
6699 *              0  -- success
6700 *              non-zero failure code
6701 *==========================================================================*/
6702int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6703{
6704    int rc = 0;
6705    CameraMetadata staticInfo;
6706    size_t count = 0;
6707    bool limitedDevice = false;
6708    char prop[PROPERTY_VALUE_MAX];
6709    bool supportBurst = false;
6710
6711    supportBurst = supportBurstCapture(cameraId);
6712
6713    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6714     * guaranteed or if min fps of max resolution is less than 20 fps, its
6715     * advertised as limited device*/
6716    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6717            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6718            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6719            !supportBurst;
6720
6721    uint8_t supportedHwLvl = limitedDevice ?
6722            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
6723            // LEVEL_3 - This device will support level 3.
6724            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6725
6726    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6727            &supportedHwLvl, 1);
6728
6729    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
6730    /*HAL 3 only*/
6731    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6732                    &gCamCapability[cameraId]->min_focus_distance, 1);
6733
6734    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6735                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
6736
6737    /*should be using focal lengths but sensor doesn't provide that info now*/
6738    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6739                      &gCamCapability[cameraId]->focal_length,
6740                      1);
6741
6742    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6743            gCamCapability[cameraId]->apertures,
6744            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6745
6746    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6747            gCamCapability[cameraId]->filter_densities,
6748            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6749
6750
6751    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6752            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6753            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6754
6755    int32_t lens_shading_map_size[] = {
6756            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6757            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6758    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6759                      lens_shading_map_size,
6760                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6761
6762    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6763            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6764
6765    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6766            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6767
6768    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6769            &gCamCapability[cameraId]->max_frame_duration, 1);
6770
6771    camera_metadata_rational baseGainFactor = {
6772            gCamCapability[cameraId]->base_gain_factor.numerator,
6773            gCamCapability[cameraId]->base_gain_factor.denominator};
6774    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6775                      &baseGainFactor, 1);
6776
6777    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6778                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6779
6780    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6781            gCamCapability[cameraId]->pixel_array_size.height};
6782    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6783                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6784
6785    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6786            gCamCapability[cameraId]->active_array_size.top,
6787            gCamCapability[cameraId]->active_array_size.width,
6788            gCamCapability[cameraId]->active_array_size.height};
6789    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6790            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6791
6792    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6793            &gCamCapability[cameraId]->white_level, 1);
6794
6795    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6796            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6797
6798    bool hasBlackRegions = false;
6799    if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6800        LOGW("black_region_count: %d is bounded to %d",
6801            gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6802        gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6803    }
6804    if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6805        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6806        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6807            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6808        }
6809        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6810                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6811        hasBlackRegions = true;
6812    }
6813
6814    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6815            &gCamCapability[cameraId]->flash_charge_duration, 1);
6816
6817    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6818            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6819
6820    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
6821    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6822            &timestampSource, 1);
6823
6824    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6825            &gCamCapability[cameraId]->histogram_size, 1);
6826
6827    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6828            &gCamCapability[cameraId]->max_histogram_count, 1);
6829
6830    int32_t sharpness_map_size[] = {
6831            gCamCapability[cameraId]->sharpness_map_size.width,
6832            gCamCapability[cameraId]->sharpness_map_size.height};
6833
6834    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6835            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6836
6837    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6838            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6839
6840    int32_t scalar_formats[] = {
6841            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6842            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6843            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6844            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6845            HAL_PIXEL_FORMAT_RAW10,
6846            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6847    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6848    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6849                      scalar_formats,
6850                      scalar_formats_count);
6851
6852    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6853    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6854    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6855            count, MAX_SIZES_CNT, available_processed_sizes);
6856    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6857            available_processed_sizes, count * 2);
6858
6859    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6860    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6861    makeTable(gCamCapability[cameraId]->raw_dim,
6862            count, MAX_SIZES_CNT, available_raw_sizes);
6863    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6864            available_raw_sizes, count * 2);
6865
6866    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6867    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6868    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6869            count, MAX_SIZES_CNT, available_fps_ranges);
6870    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6871            available_fps_ranges, count * 2);
6872
6873    camera_metadata_rational exposureCompensationStep = {
6874            gCamCapability[cameraId]->exp_compensation_step.numerator,
6875            gCamCapability[cameraId]->exp_compensation_step.denominator};
6876    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6877                      &exposureCompensationStep, 1);
6878
6879    Vector<uint8_t> availableVstabModes;
6880    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6881    char eis_prop[PROPERTY_VALUE_MAX];
6882    memset(eis_prop, 0, sizeof(eis_prop));
6883    property_get("persist.camera.eis.enable", eis_prop, "0");
6884    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6885    if (facingBack && eis_prop_set) {
6886        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6887    }
6888    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6889                      availableVstabModes.array(), availableVstabModes.size());
6890
6891    /*HAL 1 and HAL 3 common*/
6892    float maxZoom = 4;
6893    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6894            &maxZoom, 1);
6895
6896    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6897    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6898
6899    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6900    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6901        max3aRegions[2] = 0; /* AF not supported */
6902    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6903            max3aRegions, 3);
6904
6905    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6906    memset(prop, 0, sizeof(prop));
6907    property_get("persist.camera.facedetect", prop, "1");
6908    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6909    LOGD("Support face detection mode: %d",
6910             supportedFaceDetectMode);
6911
6912    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6913    Vector<uint8_t> availableFaceDetectModes;
6914    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6915    if (supportedFaceDetectMode == 1) {
6916        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6917    } else if (supportedFaceDetectMode == 2) {
6918        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6919    } else if (supportedFaceDetectMode == 3) {
6920        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6921        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6922    } else {
6923        maxFaces = 0;
6924    }
6925    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6926            availableFaceDetectModes.array(),
6927            availableFaceDetectModes.size());
6928    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6929            (int32_t *)&maxFaces, 1);
6930
6931    int32_t exposureCompensationRange[] = {
6932            gCamCapability[cameraId]->exposure_compensation_min,
6933            gCamCapability[cameraId]->exposure_compensation_max};
6934    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6935            exposureCompensationRange,
6936            sizeof(exposureCompensationRange)/sizeof(int32_t));
6937
6938    uint8_t lensFacing = (facingBack) ?
6939            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6940    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6941
6942    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6943                      available_thumbnail_sizes,
6944                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6945
6946    /*all sizes will be clubbed into this tag*/
6947    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6948    /*android.scaler.availableStreamConfigurations*/
6949    Vector<int32_t> available_stream_configs;
6950    cam_dimension_t active_array_dim;
6951    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6952    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6953    /* Add input/output stream configurations for each scalar formats*/
6954    for (size_t j = 0; j < scalar_formats_count; j++) {
6955        switch (scalar_formats[j]) {
6956        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6957        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6958        case HAL_PIXEL_FORMAT_RAW10:
6959            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6960                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6961                addStreamConfig(available_stream_configs, scalar_formats[j],
6962                        gCamCapability[cameraId]->raw_dim[i],
6963                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6964            }
6965            break;
6966        case HAL_PIXEL_FORMAT_BLOB:
6967            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6968                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6969                addStreamConfig(available_stream_configs, scalar_formats[j],
6970                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6971                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6972            }
6973            break;
6974        case HAL_PIXEL_FORMAT_YCbCr_420_888:
6975        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6976        default:
6977            cam_dimension_t largest_picture_size;
6978            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6979            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6980                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6981                addStreamConfig(available_stream_configs, scalar_formats[j],
6982                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6983                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6984                /* Book keep largest */
6985                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6986                        >= largest_picture_size.width &&
6987                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
6988                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                 addStreamConfig(available_stream_configs, scalar_formats[j],
                         largest_picture_size,
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }

    // Publish the full (format, width, height, direction) stream table built above.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    // Default hot-pixel correction mode advertised for capture requests.
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    /* android.scaler.availableMinFrameDurations */
    // Entries are quadruples: (format, width, height, min_duration_ns).
    // RAW formats use the raw dimension/duration tables; everything else
    // falls back to the processed picture-size tables.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());
7038
    // Build ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS from the
    // vendor HFR table. Each table entry's mode enum is first mapped to a
    // numeric frame rate; unknown/off modes leave fps at 0 and are skipped.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                j < MAX_SIZES_CNT; j++) {
                // Variable range: [PREVIEW_FPS_FOR_HFR, fps]
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                // Fixed range: [fps, fps], same max batch size.
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
       }
    }
    //Advertise HFR capability only if the property is set
    // persist.camera.hal3hfr.enable defaults to enabled ("1").
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // Only publish the tag when the property is on AND at least one config
    // was collected (array() is non-null only for a non-empty Vector).
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
7114
    // Maximum compressed JPEG buffer size for this sensor.
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate HAL effect enums to framework values, dropping any the
    // framework map does not know about.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Collect supported scene modes (skipping CAM_SCENE_MODE_OFF) together
    // with their HAL-table indexes; the indexes feed makeOverridesList below.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Per-scene-mode (AE, AWB, AF) override triples.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    // NOTE(review): this fallback runs AFTER AVAILABLE_SCENE_MODES was
    // published with count 0 above, so only the OVERRIDES tag below reflects
    // the DISABLED entry; scene_mode_overrides[0..2] also looks unfilled by
    // makeOverridesList when the original count was 0 — confirm intent.
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);
7172
    // All three control modes are always advertised.
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);

    // Map supported antibanding modes to framework enums, dropping unknowns.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        //  If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    // Supported auto-focus modes, HAL enum -> framework enum.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // Supported white-balance modes, HAL enum -> framework enum.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    // Flash firing power levels copied straight from the capability table
    // (values truncated to uint8_t as the tag requires).
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);
7267
    // AE modes from the capability table; the two flash-assisted modes are
    // appended only when the unit actually has a flash.
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    // Sensor ISO sensitivity range [min, max].
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // Clockwise angle the sensor image must be rotated for upright display.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // Tag order is (RAW, processed-non-stalling, stalling) per the framework;
    // the initializer maps the project's stream-count constants onto it.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // Count 0: explicitly advertise "no controllable LEDs".
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Publish focus-distance calibration quality only if the HAL value maps
    // to a known framework enum.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    // Supported sensor test-pattern modes, HAL enum -> framework enum.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Worst-case frames a request can spend in the pipeline.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
    // Assemble ANDROID_REQUEST_AVAILABLE_CAPABILITIES. Baseline capabilities
    // are unconditional; BURST, constrained high-speed video, and RAW are
    // added based on runtime checks below.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Same gate as the HIGH_SPEED_VIDEO_CONFIGURATIONS tag: property on and
    // a non-empty HFR config list.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // YUV sensors cannot produce RAW output.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // One reprocess input stream at a time.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);
7403
    // Post-RAW (ISP digital gain) sensitivity boost range [min, max].
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));

    // Fixed mode lists advertised unconditionally for this HAL.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));

    // Reference illuminants are published only when the HAL value maps to a
    // framework enum; each matrix tag below pairs with one illuminant.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Color math matrices; the capability fields are reinterpreted as flat
    // arrays of camera_metadata_rational_t (cast via void* to silence
    // layout-compatibility warnings).
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7498
    // Baseline set of settable request keys; AF_REGIONS is appended below
    // only for cameras with more than one focus mode (i.e. a movable lens).
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       /* DevCamDebug metadata request_keys_basic */
       DEVCAMDEBUG_META_ENABLE,
       /* DevCamDebug metadata end */
       };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions are only meaningful when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
7544
    // Baseline set of result keys; conditional keys (AF regions, RAW noise
    // profile/green split, face-detect fields, dynamic black/white level)
    // are appended below based on sensor/feature support.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES,
       NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
       // DevCamDebug metadata result_keys_basic
       DEVCAMDEBUG_META_ENABLE,
       // DevCamDebug metadata result_keys AF
       DEVCAMDEBUG_AF_LENS_POSITION,
       DEVCAMDEBUG_AF_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_TOF_DISTANCE,
       DEVCAMDEBUG_AF_LUMA,
       DEVCAMDEBUG_AF_HAF_STATE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
       DEVCAMDEBUG_AF_MONITOR_REFOCUS,
       DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
       DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
       DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
       DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
       DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
       // DevCamDebug metadata result_keys AEC
       DEVCAMDEBUG_AEC_TARGET_LUMA,
       DEVCAMDEBUG_AEC_COMP_LUMA,
       DEVCAMDEBUG_AEC_AVG_LUMA,
       DEVCAMDEBUG_AEC_CUR_LUMA,
       DEVCAMDEBUG_AEC_LINECOUNT,
       DEVCAMDEBUG_AEC_REAL_GAIN,
       DEVCAMDEBUG_AEC_EXP_INDEX,
       DEVCAMDEBUG_AEC_LUX_IDX,
       // DevCamDebug metadata result_keys AWB
       DEVCAMDEBUG_AWB_R_GAIN,
       DEVCAMDEBUG_AWB_G_GAIN,
       DEVCAMDEBUG_AWB_B_GAIN,
       DEVCAMDEBUG_AWB_CCT,
       DEVCAMDEBUG_AWB_DECISION,
       /* DevCamDebug metadata end */
       };
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // RAW (bayer) sensors additionally report noise profile and green split.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // supportedFaceDetectMode: 1 -> SIMPLE (rects + scores);
    // 2/3 -> FULL (also ids + landmarks).
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
    if (hasBlackRegions) {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
7643
7644    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7645       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7646       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7647       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7648       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7649       ANDROID_SCALER_CROPPING_TYPE,
7650       ANDROID_SYNC_MAX_LATENCY,
7651       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7652       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7653       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7654       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7655       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7656       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7657       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7658       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7659       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7660       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7661       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7662       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7663       ANDROID_LENS_FACING,
7664       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7665       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7666       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7667       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7668       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7669       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7670       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7671       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7672       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7673       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7674       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7675       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7676       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7677       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7678       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7679       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7680       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7681       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7682       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7683       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7684       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7685       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7686       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7687       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7688       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7689       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7690       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7691       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7692       ANDROID_TONEMAP_MAX_CURVE_POINTS,
7693       ANDROID_CONTROL_AVAILABLE_MODES,
7694       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7695       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7696       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7697       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7698       ANDROID_SHADING_AVAILABLE_MODES,
7699       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7700       ANDROID_SENSOR_OPAQUE_RAW_SIZE };
7701
7702    Vector<int32_t> available_characteristics_keys;
7703    available_characteristics_keys.appendArray(characteristics_keys_basic,
7704            sizeof(characteristics_keys_basic)/sizeof(int32_t));
7705    if (hasBlackRegions) {
7706        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7707    }
7708    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
7709                      available_characteristics_keys.array(),
7710                      available_characteristics_keys.size());
7711
7712    /*available stall durations depend on the hw + sw and will be different for different devices */
7713    /*have to add for raw after implementation*/
7714    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7715    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7716
7717    Vector<int64_t> available_stall_durations;
7718    for (uint32_t j = 0; j < stall_formats_count; j++) {
7719        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7720            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7721                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7722                available_stall_durations.add(stall_formats[j]);
7723                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7724                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7725                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7726          }
7727        } else {
7728            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7729                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7730                available_stall_durations.add(stall_formats[j]);
7731                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7732                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7733                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7734            }
7735        }
7736    }
7737    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7738                      available_stall_durations.array(),
7739                      available_stall_durations.size());
7740
7741    //QCAMERA3_OPAQUE_RAW
7742    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7743    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7744    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7745    case LEGACY_RAW:
7746        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7747            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7748        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7749            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7750        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7751            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7752        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7753        break;
7754    case MIPI_RAW:
7755        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7756            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7757        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7758            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7759        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7760            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7761        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7762        break;
7763    default:
7764        LOGE("unknown opaque_raw_format %d",
7765                gCamCapability[cameraId]->opaque_raw_fmt);
7766        break;
7767    }
7768    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7769
7770    Vector<int32_t> strides;
7771    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7772            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7773        cam_stream_buf_plane_info_t buf_planes;
7774        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7775        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7776        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7777            &gCamCapability[cameraId]->padding_info, &buf_planes);
7778        strides.add(buf_planes.plane_info.mp[0].stride);
7779    }
7780    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7781            strides.size());
7782
7783    Vector<int32_t> opaque_size;
7784    for (size_t j = 0; j < scalar_formats_count; j++) {
7785        if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7786            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7787                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7788                cam_stream_buf_plane_info_t buf_planes;
7789
7790                rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7791                         &gCamCapability[cameraId]->padding_info, &buf_planes);
7792
7793                if (rc == 0) {
7794                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7795                    opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7796                    opaque_size.add(buf_planes.plane_info.frame_len);
7797                }else {
7798                    LOGE("raw frame calculation failed!");
7799                }
7800            }
7801        }
7802    }
7803
7804    if ((opaque_size.size() > 0) &&
7805            (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7806        staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7807    else
7808        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7809
7810    gStaticMetadata[cameraId] = staticInfo.release();
7811    return rc;
7812}
7813
7814/*===========================================================================
7815 * FUNCTION   : makeTable
7816 *
7817 * DESCRIPTION: make a table of sizes
7818 *
7819 * PARAMETERS :
7820 *
7821 *
7822 *==========================================================================*/
7823void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7824        size_t max_size, int32_t *sizeTable)
7825{
7826    size_t j = 0;
7827    if (size > max_size) {
7828       size = max_size;
7829    }
7830    for (size_t i = 0; i < size; i++) {
7831        sizeTable[j] = dimTable[i].width;
7832        sizeTable[j+1] = dimTable[i].height;
7833        j+=2;
7834    }
7835}
7836
7837/*===========================================================================
7838 * FUNCTION   : makeFPSTable
7839 *
7840 * DESCRIPTION: make a table of fps ranges
7841 *
7842 * PARAMETERS :
7843 *
7844 *==========================================================================*/
7845void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7846        size_t max_size, int32_t *fpsRangesTable)
7847{
7848    size_t j = 0;
7849    if (size > max_size) {
7850       size = max_size;
7851    }
7852    for (size_t i = 0; i < size; i++) {
7853        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7854        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7855        j+=2;
7856    }
7857}
7858
7859/*===========================================================================
7860 * FUNCTION   : makeOverridesList
7861 *
7862 * DESCRIPTION: make a list of scene mode overrides
7863 *
7864 * PARAMETERS :
7865 *
7866 *
7867 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout: one triplet (AE mode, AWB mode, AF mode) per supported
    // scene mode; j indexes the start of the current triplet.
    size_t j = 0;
    if (size > max_size) {
       size = max_size;
    }
    // Clamp the focus-mode count to the capability array bound before
    // scanning supported_focus_modes below.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes maps the i-th framework scene mode to its slot
        // in the daemon's full overrides table.
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the camera has a flash.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL enum to the framework enum.
        // NOTE(review): on NAME_NOT_FOUND the slot is left unwritten —
        // presumably the caller zero-initializes overridesList; confirm.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only report it if the sensor actually supports the
        // daemon-suggested focus mode; otherwise fall back to AF_MODE_OFF.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
7912
7913/*===========================================================================
7914 * FUNCTION   : filterJpegSizes
7915 *
7916 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7917 *              could be downscaled to
7918 *
7919 * PARAMETERS :
7920 *
7921 * RETURN     : length of jpegSizes array
7922 *==========================================================================*/
7923
7924size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7925        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7926        uint8_t downscale_factor)
7927{
7928    if (0 == downscale_factor) {
7929        downscale_factor = 1;
7930    }
7931
7932    int32_t min_width = active_array_size.width / downscale_factor;
7933    int32_t min_height = active_array_size.height / downscale_factor;
7934    size_t jpegSizesCnt = 0;
7935    if (processedSizesCnt > maxCount) {
7936        processedSizesCnt = maxCount;
7937    }
7938    for (size_t i = 0; i < processedSizesCnt; i+=2) {
7939        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7940            jpegSizes[jpegSizesCnt] = processedSizes[i];
7941            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7942            jpegSizesCnt += 2;
7943        }
7944    }
7945    return jpegSizesCnt;
7946}
7947
7948/*===========================================================================
7949 * FUNCTION   : computeNoiseModelEntryS
7950 *
7951 * DESCRIPTION: function to map a given sensitivity to the S noise
7952 *              model parameters in the DNG noise model.
7953 *
7954 * PARAMETERS : sens : the sensor sensitivity
7955 *
 * RETURN     : S (sensor amplification) noise
7957 *
7958 *==========================================================================*/
7959double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7960    double s = gCamCapability[mCameraId]->gradient_S * sens +
7961            gCamCapability[mCameraId]->offset_S;
7962    return ((s < 0.0) ? 0.0 : s);
7963}
7964
7965/*===========================================================================
7966 * FUNCTION   : computeNoiseModelEntryO
7967 *
7968 * DESCRIPTION: function to map a given sensitivity to the O noise
7969 *              model parameters in the DNG noise model.
7970 *
7971 * PARAMETERS : sens : the sensor sensitivity
7972 *
 * RETURN     : O (sensor readout) noise
7974 *
7975 *==========================================================================*/
7976double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7977    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7978    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7979            1.0 : (1.0 * sens / max_analog_sens);
7980    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7981            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7982    return ((o < 0.0) ? 0.0 : o);
7983}
7984
7985/*===========================================================================
7986 * FUNCTION   : getSensorSensitivity
7987 *
7988 * DESCRIPTION: convert iso_mode to an integer value
7989 *
7990 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7991 *
 * RETURN     : sensitivity supported by sensor
7993 *
7994 *==========================================================================*/
7995int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7996{
7997    int32_t sensitivity;
7998
7999    switch (iso_mode) {
8000    case CAM_ISO_MODE_100:
8001        sensitivity = 100;
8002        break;
8003    case CAM_ISO_MODE_200:
8004        sensitivity = 200;
8005        break;
8006    case CAM_ISO_MODE_400:
8007        sensitivity = 400;
8008        break;
8009    case CAM_ISO_MODE_800:
8010        sensitivity = 800;
8011        break;
8012    case CAM_ISO_MODE_1600:
8013        sensitivity = 1600;
8014        break;
8015    default:
8016        sensitivity = -1;
8017        break;
8018    }
8019    return sensitivity;
8020}
8021
8022/*===========================================================================
8023 * FUNCTION   : getCamInfo
8024 *
8025 * DESCRIPTION: query camera capabilities
8026 *
8027 * PARAMETERS :
8028 *   @cameraId  : camera Id
8029 *   @info      : camera info struct to be filled in with camera capabilities
8030 *
8031 * RETURN     : int type of status
8032 *              NO_ERROR  -- success
 *              non-zero failure code
8034 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamCapability and gStaticMetadata are process-wide caches shared by
    // all camera instances; gCamLock serializes their lazy initialization.
    // Every early-return path below must release the lock first.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the sensor's physical position into the framework's
    // facing constant. An unknown position still fills in the rest of the
    // info struct but reports failure via rc.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest max_fps across all advertised fps ranges; it bounds
    // the worst-case pixel throughput for this sensor.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case throughput: every processed stream at full active-array
    // resolution at max_fps, relative to the CPP's maximum bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
8102
8103/*===========================================================================
8104 * FUNCTION   : translateCapabilityToMetadata
8105 *
8106 * DESCRIPTION: translate the capability into camera_metadata_t
8107 *
8108 * PARAMETERS : type of the request
8109 *
8110 *
8111 * RETURN     : success: camera_metadata_t*
8112 *              failure: NULL
8113 *
8114 *==========================================================================*/
8115camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8116{
8117    if (mDefaultMetadata[type] != NULL) {
8118        return mDefaultMetadata[type];
8119    }
8120    //first time we are handling this request
8121    //fill up the metadata structure using the wrapper class
8122    CameraMetadata settings;
8123    //translate from cam_capability_t to camera_metadata_tag_t
8124    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8125    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8126    int32_t defaultRequestID = 0;
8127    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8128
8129    /* OIS disable */
8130    char ois_prop[PROPERTY_VALUE_MAX];
8131    memset(ois_prop, 0, sizeof(ois_prop));
8132    property_get("persist.camera.ois.disable", ois_prop, "0");
8133    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8134
8135    /* Force video to use OIS */
8136    char videoOisProp[PROPERTY_VALUE_MAX];
8137    memset(videoOisProp, 0, sizeof(videoOisProp));
8138    property_get("persist.camera.ois.video", videoOisProp, "1");
8139    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
8140
8141    // EIS enable/disable
8142    char eis_prop[PROPERTY_VALUE_MAX];
8143    memset(eis_prop, 0, sizeof(eis_prop));
8144    property_get("persist.camera.eis.enable", eis_prop, "0");
8145    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
8146
8147    // Hybrid AE enable/disable
8148    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
8149    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
8150    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
8151    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
8152
8153    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
8154    // This is a bit hacky. EIS is enabled only when the above setprop
8155    // is set to non-zero value and on back camera (for 2015 Nexus).
8156    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
8157    // configureStream is called before this function. In other words,
8158    // we cannot guarantee the app will call configureStream before
8159    // calling createDefaultRequest.
8160    const bool eisEnabled = facingBack && eis_prop_set;
8161
8162    uint8_t controlIntent = 0;
8163    uint8_t focusMode;
8164    uint8_t vsMode;
8165    uint8_t optStabMode;
8166    uint8_t cacMode;
8167    uint8_t edge_mode;
8168    uint8_t noise_red_mode;
8169    uint8_t tonemap_mode;
8170    bool highQualityModeEntryAvailable = FALSE;
8171    bool fastModeEntryAvailable = FALSE;
8172    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8173    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8174    switch (type) {
8175      case CAMERA3_TEMPLATE_PREVIEW:
8176        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8177        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8178        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8179        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8180        edge_mode = ANDROID_EDGE_MODE_FAST;
8181        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8182        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8183        break;
8184      case CAMERA3_TEMPLATE_STILL_CAPTURE:
8185        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8186        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8187        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8188        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8189        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8190        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8191        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8192        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8193        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8194            if (gCamCapability[mCameraId]->aberration_modes[i] ==
8195                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8196                highQualityModeEntryAvailable = TRUE;
8197            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8198                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8199                fastModeEntryAvailable = TRUE;
8200            }
8201        }
8202        if (highQualityModeEntryAvailable) {
8203            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8204        } else if (fastModeEntryAvailable) {
8205            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8206        }
8207        break;
8208      case CAMERA3_TEMPLATE_VIDEO_RECORD:
8209        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8210        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8211        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8212        if (eisEnabled) {
8213            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8214        }
8215        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8216        edge_mode = ANDROID_EDGE_MODE_FAST;
8217        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8218        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8219        if (forceVideoOis)
8220            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8221        break;
8222      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8223        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8224        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8225        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8226        if (eisEnabled) {
8227            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
8228        }
8229        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8230        edge_mode = ANDROID_EDGE_MODE_FAST;
8231        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8232        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8233        if (forceVideoOis)
8234            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8235        break;
8236      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8237        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8238        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8239        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8240        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8241        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8242        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8243        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8244        break;
8245      case CAMERA3_TEMPLATE_MANUAL:
8246        edge_mode = ANDROID_EDGE_MODE_FAST;
8247        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8248        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8249        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8250        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8251        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8252        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8253        break;
8254      default:
8255        edge_mode = ANDROID_EDGE_MODE_FAST;
8256        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8257        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8258        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8259        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8260        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8261        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8262        break;
8263    }
8264    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8265    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8266    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8267    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8268        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8269    }
8270    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8271
8272    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8273            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8274        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8275    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8276            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8277            || ois_disable)
8278        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8279    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8280
8281    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8282            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8283
8284    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8285    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8286
8287    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8288    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8289
8290    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8291    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8292
8293    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8294    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8295
8296    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8297    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8298
8299    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8300    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8301
8302    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8303    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8304
8305    /*flash*/
8306    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8307    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8308
8309    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8310    settings.update(ANDROID_FLASH_FIRING_POWER,
8311            &flashFiringLevel, 1);
8312
8313    /* lens */
8314    float default_aperture = gCamCapability[mCameraId]->apertures[0];
8315    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8316
8317    if (gCamCapability[mCameraId]->filter_densities_count) {
8318        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8319        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8320                        gCamCapability[mCameraId]->filter_densities_count);
8321    }
8322
8323    float default_focal_length = gCamCapability[mCameraId]->focal_length;
8324    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8325
8326    if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8327        float default_focus_distance = 0;
8328        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8329    }
8330
8331    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8332    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8333
8334    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8335    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8336
8337    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8338    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8339
8340    /* face detection (default to OFF) */
8341    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8342    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8343
8344    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8345    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8346
8347    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8348    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8349
8350    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8351    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8352
8353    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8354    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8355
8356    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8357    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8358
8359    /* Exposure time(Update the Min Exposure Time)*/
8360    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8361    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8362
8363    /* frame duration */
8364    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8365    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8366
8367    /* sensitivity */
8368    static const int32_t default_sensitivity = 100;
8369    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
8370    static const int32_t default_isp_sensitivity =
8371            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8372    settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8373
8374    /*edge mode*/
8375    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8376
8377    /*noise reduction mode*/
8378    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8379
8380    /*color correction mode*/
8381    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8382    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8383
8384    /*transform matrix mode*/
8385    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8386
8387    int32_t scaler_crop_region[4];
8388    scaler_crop_region[0] = 0;
8389    scaler_crop_region[1] = 0;
8390    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8391    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8392    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8393
8394    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8395    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8396
8397    /*focus distance*/
8398    float focus_distance = 0.0;
8399    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8400
8401    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8402    /* Restrict default preview template to max 30 fps */
8403    float max_range = 0.0;
8404    float max_fixed_fps = 0.0;
8405    int32_t fps_range[2] = {0, 0};
8406    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8407            i++) {
8408        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
8409                TEMPLATE_MAX_PREVIEW_FPS) {
8410            continue;
8411        }
8412        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8413            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8414        if (type == CAMERA3_TEMPLATE_PREVIEW ||
8415                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8416                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8417            if (range > max_range) {
8418                fps_range[0] =
8419                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8420                fps_range[1] =
8421                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8422                max_range = range;
8423            }
8424        } else {
8425            if (range < 0.01 && max_fixed_fps <
8426                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8427                fps_range[0] =
8428                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8429                fps_range[1] =
8430                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8431                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8432            }
8433        }
8434    }
8435    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8436
8437    /*precapture trigger*/
8438    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8439    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8440
8441    /*af trigger*/
8442    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8443    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8444
8445    /* ae & af regions */
8446    int32_t active_region[] = {
8447            gCamCapability[mCameraId]->active_array_size.left,
8448            gCamCapability[mCameraId]->active_array_size.top,
8449            gCamCapability[mCameraId]->active_array_size.left +
8450                    gCamCapability[mCameraId]->active_array_size.width,
8451            gCamCapability[mCameraId]->active_array_size.top +
8452                    gCamCapability[mCameraId]->active_array_size.height,
8453            0};
8454    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8455            sizeof(active_region) / sizeof(active_region[0]));
8456    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8457            sizeof(active_region) / sizeof(active_region[0]));
8458
8459    /* black level lock */
8460    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8461    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8462
8463    /* lens shading map mode */
8464    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8465    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8466        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8467    }
8468    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8469
8470    //special defaults for manual template
8471    if (type == CAMERA3_TEMPLATE_MANUAL) {
8472        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8473        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8474
8475        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8476        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8477
8478        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8479        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8480
8481        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8482        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8483
8484        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8485        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8486
8487        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8488        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8489    }
8490
8491
8492    /* TNR
8493     * We'll use this location to determine which modes TNR will be set.
8494     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8495     * This is not to be confused with linking on a per stream basis that decision
8496     * is still on per-session basis and will be handled as part of config stream
8497     */
8498    uint8_t tnr_enable = 0;
8499
8500    if (m_bTnrPreview || m_bTnrVideo) {
8501
8502        switch (type) {
8503            case CAMERA3_TEMPLATE_VIDEO_RECORD:
8504                    tnr_enable = 1;
8505                    break;
8506
8507            default:
8508                    tnr_enable = 0;
8509                    break;
8510        }
8511
8512        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8513        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8514        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8515
8516        LOGD("TNR:%d with process plate %d for template:%d",
8517                             tnr_enable, tnr_process_type, type);
8518    }
8519
8520    //Update Link tags to default
8521    int32_t sync_type = CAM_TYPE_STANDALONE;
8522    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8523
8524    int32_t is_main = 0; //this doesn't matter as app should overwrite
8525    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8526
8527    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8528
8529    /* CDS default */
8530    char prop[PROPERTY_VALUE_MAX];
8531    memset(prop, 0, sizeof(prop));
8532    property_get("persist.camera.CDS", prop, "Auto");
8533    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8534    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8535    if (CAM_CDS_MODE_MAX == cds_mode) {
8536        cds_mode = CAM_CDS_MODE_AUTO;
8537    }
8538
8539    /* Disabling CDS in templates which have TNR enabled*/
8540    if (tnr_enable)
8541        cds_mode = CAM_CDS_MODE_OFF;
8542
8543    int32_t mode = cds_mode;
8544    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
8545
8546    /* hybrid ae */
8547    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
8548
8549    mDefaultMetadata[type] = settings.release();
8550
8551    return mDefaultMetadata[type];
8552}
8553
8554/*===========================================================================
8555 * FUNCTION   : setFrameParameters
8556 *
8557 * DESCRIPTION: set parameters per frame as requested in the metadata from
8558 *              framework
8559 *
8560 * PARAMETERS :
8561 *   @request   : request that needs to be serviced
8562 *   @streamID : Stream ID of all the requested streams
8563 *   @blob_request: Whether this request is a blob request or not
8564 *
8565 * RETURN     : success: NO_ERROR
8566 *              failure:
8567 *==========================================================================*/
8568int QCamera3HardwareInterface::setFrameParameters(
8569                    camera3_capture_request_t *request,
8570                    cam_stream_ID_t streamID,
8571                    int blob_request,
8572                    uint32_t snapshotStreamId)
8573{
8574    /*translate from camera_metadata_t type to parm_type_t*/
8575    int rc = 0;
8576    int32_t hal_version = CAM_HAL_V3;
8577
8578    clear_metadata_buffer(mParameters);
8579    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8580        LOGE("Failed to set hal version in the parameters");
8581        return BAD_VALUE;
8582    }
8583
8584    /*we need to update the frame number in the parameters*/
8585    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8586            request->frame_number)) {
8587        LOGE("Failed to set the frame number in the parameters");
8588        return BAD_VALUE;
8589    }
8590
8591    /* Update stream id of all the requested buffers */
8592    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8593        LOGE("Failed to set stream type mask in the parameters");
8594        return BAD_VALUE;
8595    }
8596
8597    if (mUpdateDebugLevel) {
8598        uint32_t dummyDebugLevel = 0;
8599        /* The value of dummyDebugLevel is irrelavent. On
8600         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8601        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8602                dummyDebugLevel)) {
8603            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8604            return BAD_VALUE;
8605        }
8606        mUpdateDebugLevel = false;
8607    }
8608
8609    if(request->settings != NULL){
8610        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8611        if (blob_request)
8612            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8613    }
8614
8615    return rc;
8616}
8617
8618/*===========================================================================
8619 * FUNCTION   : setReprocParameters
8620 *
8621 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8622 *              return it.
8623 *
8624 * PARAMETERS :
8625 *   @request   : request that needs to be serviced
8626 *
8627 * RETURN     : success: NO_ERROR
8628 *              failure:
8629 *==========================================================================*/
8630int32_t QCamera3HardwareInterface::setReprocParameters(
8631        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8632        uint32_t snapshotStreamId)
8633{
8634    /*translate from camera_metadata_t type to parm_type_t*/
8635    int rc = 0;
8636
8637    if (NULL == request->settings){
8638        LOGE("Reprocess settings cannot be NULL");
8639        return BAD_VALUE;
8640    }
8641
8642    if (NULL == reprocParam) {
8643        LOGE("Invalid reprocessing metadata buffer");
8644        return BAD_VALUE;
8645    }
8646    clear_metadata_buffer(reprocParam);
8647
8648    /*we need to update the frame number in the parameters*/
8649    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8650            request->frame_number)) {
8651        LOGE("Failed to set the frame number in the parameters");
8652        return BAD_VALUE;
8653    }
8654
8655    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8656    if (rc < 0) {
8657        LOGE("Failed to translate reproc request");
8658        return rc;
8659    }
8660
8661    CameraMetadata frame_settings;
8662    frame_settings = request->settings;
8663    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8664            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8665        int32_t *crop_count =
8666                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8667        int32_t *crop_data =
8668                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8669        int32_t *roi_map =
8670                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8671        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8672            cam_crop_data_t crop_meta;
8673            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8674            crop_meta.num_of_streams = 1;
8675            crop_meta.crop_info[0].crop.left   = crop_data[0];
8676            crop_meta.crop_info[0].crop.top    = crop_data[1];
8677            crop_meta.crop_info[0].crop.width  = crop_data[2];
8678            crop_meta.crop_info[0].crop.height = crop_data[3];
8679
8680            crop_meta.crop_info[0].roi_map.left =
8681                    roi_map[0];
8682            crop_meta.crop_info[0].roi_map.top =
8683                    roi_map[1];
8684            crop_meta.crop_info[0].roi_map.width =
8685                    roi_map[2];
8686            crop_meta.crop_info[0].roi_map.height =
8687                    roi_map[3];
8688
8689            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8690                rc = BAD_VALUE;
8691            }
8692            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8693                    request->input_buffer->stream,
8694                    crop_meta.crop_info[0].crop.left,
8695                    crop_meta.crop_info[0].crop.top,
8696                    crop_meta.crop_info[0].crop.width,
8697                    crop_meta.crop_info[0].crop.height);
8698            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8699                    request->input_buffer->stream,
8700                    crop_meta.crop_info[0].roi_map.left,
8701                    crop_meta.crop_info[0].roi_map.top,
8702                    crop_meta.crop_info[0].roi_map.width,
8703                    crop_meta.crop_info[0].roi_map.height);
8704            } else {
8705                LOGE("Invalid reprocess crop count %d!", *crop_count);
8706            }
8707    } else {
8708        LOGE("No crop data from matching output stream");
8709    }
8710
8711    /* These settings are not needed for regular requests so handle them specially for
8712       reprocess requests; information needed for EXIF tags */
8713    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8714        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8715                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8716        if (NAME_NOT_FOUND != val) {
8717            uint32_t flashMode = (uint32_t)val;
8718            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8719                rc = BAD_VALUE;
8720            }
8721        } else {
8722            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8723                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8724        }
8725    } else {
8726        LOGH("No flash mode in reprocess settings");
8727    }
8728
8729    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8730        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8731        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8732            rc = BAD_VALUE;
8733        }
8734    } else {
8735        LOGH("No flash state in reprocess settings");
8736    }
8737
8738    return rc;
8739}
8740
8741/*===========================================================================
8742 * FUNCTION   : saveRequestSettings
8743 *
8744 * DESCRIPTION: Add any settings that might have changed to the request settings
8745 *              and save the settings to be applied on the frame
8746 *
8747 * PARAMETERS :
8748 *   @jpegMetadata : the extracted and/or modified jpeg metadata
8749 *   @request      : request with initial settings
8750 *
8751 * RETURN     :
8752 * camera_metadata_t* : pointer to the saved request settings
8753 *==========================================================================*/
8754camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8755        const CameraMetadata &jpegMetadata,
8756        camera3_capture_request_t *request)
8757{
8758    camera_metadata_t *resultMetadata;
8759    CameraMetadata camMetadata;
8760    camMetadata = request->settings;
8761
8762    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8763        int32_t thumbnail_size[2];
8764        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8765        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8766        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8767                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8768    }
8769
8770    resultMetadata = camMetadata.release();
8771    return resultMetadata;
8772}
8773
8774/*===========================================================================
8775 * FUNCTION   : setHalFpsRange
8776 *
8777 * DESCRIPTION: set FPS range parameter
8778 *
8779 *
8780 * PARAMETERS :
8781 *   @settings    : Metadata from framework
8782 *   @hal_metadata: Metadata buffer
8783 *
8784 *
8785 * RETURN     : success: NO_ERROR
8786 *              failure:
8787 *==========================================================================*/
8788int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
8789        metadata_buffer_t *hal_metadata)
8790{
8791    int32_t rc = NO_ERROR;
8792    cam_fps_range_t fps_range;
8793    fps_range.min_fps = (float)
8794            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
8795    fps_range.max_fps = (float)
8796            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
8797    fps_range.video_min_fps = fps_range.min_fps;
8798    fps_range.video_max_fps = fps_range.max_fps;
8799
8800    LOGD("aeTargetFpsRange fps: [%f %f]",
8801            fps_range.min_fps, fps_range.max_fps);
8802    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
8803     * follows:
8804     * ---------------------------------------------------------------|
8805     *      Video stream is absent in configure_streams               |
8806     *    (Camcorder preview before the first video record            |
8807     * ---------------------------------------------------------------|
8808     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
8809     *                   |             |             | vid_min/max_fps|
8810     * ---------------------------------------------------------------|
8811     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
8812     *                   |-------------|-------------|----------------|
8813     *                   |  [240, 240] |     240     |  [240, 240]    |
8814     * ---------------------------------------------------------------|
8815     *     Video stream is present in configure_streams               |
8816     * ---------------------------------------------------------------|
8817     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
8818     *                   |             |             | vid_min/max_fps|
8819     * ---------------------------------------------------------------|
8820     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
8821     * (camcorder prev   |-------------|-------------|----------------|
8822     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
8823     *  is stopped)      |             |             |                |
8824     * ---------------------------------------------------------------|
8825     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
8826     *                   |-------------|-------------|----------------|
8827     *                   |  [240, 240] |     240     |  [240, 240]    |
8828     * ---------------------------------------------------------------|
8829     * When Video stream is absent in configure_streams,
8830     * preview fps = sensor_fps / batchsize
8831     * Eg: for 240fps at batchSize 4, preview = 60fps
8832     *     for 120fps at batchSize 4, preview = 30fps
8833     *
8834     * When video stream is present in configure_streams, preview fps is as per
8835     * the ratio of preview buffers to video buffers requested in process
8836     * capture request
8837     */
8838    mBatchSize = 0;
8839    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
8840        fps_range.min_fps = fps_range.video_max_fps;
8841        fps_range.video_min_fps = fps_range.video_max_fps;
8842        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
8843                fps_range.max_fps);
8844        if (NAME_NOT_FOUND != val) {
8845            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
8846            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
8847                return BAD_VALUE;
8848            }
8849
8850            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
8851                /* If batchmode is currently in progress and the fps changes,
8852                 * set the flag to restart the sensor */
8853                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
8854                        (mHFRVideoFps != fps_range.max_fps)) {
8855                    mNeedSensorRestart = true;
8856                }
8857                mHFRVideoFps = fps_range.max_fps;
8858                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
8859                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
8860                    mBatchSize = MAX_HFR_BATCH_SIZE;
8861                }
8862             }
8863            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
8864
8865         }
8866    } else {
8867        /* HFR mode is session param in backend/ISP. This should be reset when
8868         * in non-HFR mode  */
8869        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
8870        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
8871            return BAD_VALUE;
8872        }
8873    }
8874    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
8875        return BAD_VALUE;
8876    }
8877    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
8878            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
8879    return rc;
8880}
8881
8882/*===========================================================================
8883 * FUNCTION   : translateToHalMetadata
8884 *
8885 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8886 *
8887 *
8888 * PARAMETERS :
8889 *   @request  : request sent from framework
8890 *
8891 *
8892 * RETURN     : success: NO_ERROR
8893 *              failure:
8894 *==========================================================================*/
8895int QCamera3HardwareInterface::translateToHalMetadata
8896                                  (const camera3_capture_request_t *request,
8897                                   metadata_buffer_t *hal_metadata,
8898                                   uint32_t snapshotStreamId)
8899{
8900    int rc = 0;
8901    CameraMetadata frame_settings;
8902    frame_settings = request->settings;
8903
8904    /* Do not change the order of the following list unless you know what you are
8905     * doing.
8906     * The order is laid out in such a way that parameters in the front of the table
8907     * may be used to override the parameters later in the table. Examples are:
8908     * 1. META_MODE should precede AEC/AWB/AF MODE
8909     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
8910     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
8911     * 4. Any mode should precede it's corresponding settings
8912     */
8913    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8914        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8915        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8916            rc = BAD_VALUE;
8917        }
8918        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8919        if (rc != NO_ERROR) {
8920            LOGE("extractSceneMode failed");
8921        }
8922    }
8923
8924    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8925        uint8_t fwk_aeMode =
8926            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8927        uint8_t aeMode;
8928        int32_t redeye;
8929
8930        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8931            aeMode = CAM_AE_MODE_OFF;
8932        } else {
8933            aeMode = CAM_AE_MODE_ON;
8934        }
8935        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8936            redeye = 1;
8937        } else {
8938            redeye = 0;
8939        }
8940
8941        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8942                fwk_aeMode);
8943        if (NAME_NOT_FOUND != val) {
8944            int32_t flashMode = (int32_t)val;
8945            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8946        }
8947
8948        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8949        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8950            rc = BAD_VALUE;
8951        }
8952    }
8953
8954    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8955        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8956        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8957                fwk_whiteLevel);
8958        if (NAME_NOT_FOUND != val) {
8959            uint8_t whiteLevel = (uint8_t)val;
8960            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8961                rc = BAD_VALUE;
8962            }
8963        }
8964    }
8965
8966    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8967        uint8_t fwk_cacMode =
8968                frame_settings.find(
8969                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8970        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8971                fwk_cacMode);
8972        if (NAME_NOT_FOUND != val) {
8973            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8974            bool entryAvailable = FALSE;
8975            // Check whether Frameworks set CAC mode is supported in device or not
8976            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8977                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
8978                    entryAvailable = TRUE;
8979                    break;
8980                }
8981            }
8982            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
8983            // If entry not found then set the device supported mode instead of frameworks mode i.e,
8984            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
8985            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
8986            if (entryAvailable == FALSE) {
8987                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8988                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8989                } else {
8990                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8991                        // High is not supported and so set the FAST as spec say's underlying
8992                        // device implementation can be the same for both modes.
8993                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
8994                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8995                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
8996                        // in order to avoid the fps drop due to high quality
8997                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
8998                    } else {
8999                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9000                    }
9001                }
9002            }
9003            LOGD("Final cacMode is %d", cacMode);
9004            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9005                rc = BAD_VALUE;
9006            }
9007        } else {
9008            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9009        }
9010    }
9011
9012    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9013        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9014        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9015                fwk_focusMode);
9016        if (NAME_NOT_FOUND != val) {
9017            uint8_t focusMode = (uint8_t)val;
9018            LOGD("set focus mode %d", focusMode);
9019            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9020                rc = BAD_VALUE;
9021            }
9022        }
9023    }
9024
9025    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9026        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9027        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9028                focalDistance)) {
9029            rc = BAD_VALUE;
9030        }
9031    }
9032
9033    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9034        uint8_t fwk_antibandingMode =
9035                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9036        int val = lookupHalName(ANTIBANDING_MODES_MAP,
9037                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9038        if (NAME_NOT_FOUND != val) {
9039            uint32_t hal_antibandingMode = (uint32_t)val;
9040            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9041                    hal_antibandingMode)) {
9042                rc = BAD_VALUE;
9043            }
9044        }
9045    }
9046
9047    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9048        int32_t expCompensation = frame_settings.find(
9049                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9050        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9051            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9052        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9053            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9054        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9055                expCompensation)) {
9056            rc = BAD_VALUE;
9057        }
9058    }
9059
9060    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9061        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9062        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9063            rc = BAD_VALUE;
9064        }
9065    }
9066    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9067        rc = setHalFpsRange(frame_settings, hal_metadata);
9068        if (rc != NO_ERROR) {
9069            LOGE("setHalFpsRange failed");
9070        }
9071    }
9072
9073    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9074        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9075        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9076            rc = BAD_VALUE;
9077        }
9078    }
9079
9080    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9081        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9082        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9083                fwk_effectMode);
9084        if (NAME_NOT_FOUND != val) {
9085            uint8_t effectMode = (uint8_t)val;
9086            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9087                rc = BAD_VALUE;
9088            }
9089        }
9090    }
9091
9092    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9093        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9094        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9095                colorCorrectMode)) {
9096            rc = BAD_VALUE;
9097        }
9098    }
9099
9100    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9101        cam_color_correct_gains_t colorCorrectGains;
9102        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
9103            colorCorrectGains.gains[i] =
9104                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9105        }
9106        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9107                colorCorrectGains)) {
9108            rc = BAD_VALUE;
9109        }
9110    }
9111
9112    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9113        cam_color_correct_matrix_t colorCorrectTransform;
9114        cam_rational_type_t transform_elem;
9115        size_t num = 0;
9116        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9117           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9118              transform_elem.numerator =
9119                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9120              transform_elem.denominator =
9121                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9122              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9123              num++;
9124           }
9125        }
9126        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9127                colorCorrectTransform)) {
9128            rc = BAD_VALUE;
9129        }
9130    }
9131
9132    cam_trigger_t aecTrigger;
9133    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9134    aecTrigger.trigger_id = -1;
9135    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9136        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9137        aecTrigger.trigger =
9138            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9139        aecTrigger.trigger_id =
9140            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9141        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9142                aecTrigger)) {
9143            rc = BAD_VALUE;
9144        }
9145        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9146                aecTrigger.trigger, aecTrigger.trigger_id);
9147    }
9148
9149    /*af_trigger must come with a trigger id*/
9150    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9151        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9152        cam_trigger_t af_trigger;
9153        af_trigger.trigger =
9154            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9155        af_trigger.trigger_id =
9156            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9157        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9158            rc = BAD_VALUE;
9159        }
9160        LOGD("AfTrigger: %d AfTriggerID: %d",
9161                af_trigger.trigger, af_trigger.trigger_id);
9162    }
9163
9164    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9165        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9166        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9167            rc = BAD_VALUE;
9168        }
9169    }
9170    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9171        cam_edge_application_t edge_application;
9172        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9173        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9174            edge_application.sharpness = 0;
9175        } else {
9176            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9177        }
9178        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9179            rc = BAD_VALUE;
9180        }
9181    }
9182
9183    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9184        int32_t respectFlashMode = 1;
9185        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9186            uint8_t fwk_aeMode =
9187                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9188            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9189                respectFlashMode = 0;
9190                LOGH("AE Mode controls flash, ignore android.flash.mode");
9191            }
9192        }
9193        if (respectFlashMode) {
9194            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9195                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9196            LOGH("flash mode after mapping %d", val);
9197            // To check: CAM_INTF_META_FLASH_MODE usage
9198            if (NAME_NOT_FOUND != val) {
9199                uint8_t flashMode = (uint8_t)val;
9200                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9201                    rc = BAD_VALUE;
9202                }
9203            }
9204        }
9205    }
9206
9207    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9208        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9209        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9210            rc = BAD_VALUE;
9211        }
9212    }
9213
9214    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9215        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9216        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9217                flashFiringTime)) {
9218            rc = BAD_VALUE;
9219        }
9220    }
9221
9222    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9223        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9224        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9225                hotPixelMode)) {
9226            rc = BAD_VALUE;
9227        }
9228    }
9229
9230    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9231        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9232        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9233                lensAperture)) {
9234            rc = BAD_VALUE;
9235        }
9236    }
9237
9238    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9239        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9240        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9241                filterDensity)) {
9242            rc = BAD_VALUE;
9243        }
9244    }
9245
9246    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9247        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9248        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9249                focalLength)) {
9250            rc = BAD_VALUE;
9251        }
9252    }
9253
9254    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9255        uint8_t optStabMode =
9256                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9257        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9258                optStabMode)) {
9259            rc = BAD_VALUE;
9260        }
9261    }
9262
9263    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9264        uint8_t videoStabMode =
9265                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9266        LOGD("videoStabMode from APP = %d", videoStabMode);
9267        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9268                videoStabMode)) {
9269            rc = BAD_VALUE;
9270        }
9271    }
9272
9273
9274    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9275        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9276        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9277                noiseRedMode)) {
9278            rc = BAD_VALUE;
9279        }
9280    }
9281
9282    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9283        float reprocessEffectiveExposureFactor =
9284            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9285        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9286                reprocessEffectiveExposureFactor)) {
9287            rc = BAD_VALUE;
9288        }
9289    }
9290
9291    cam_crop_region_t scalerCropRegion;
9292    bool scalerCropSet = false;
9293    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9294        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9295        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9296        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9297        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9298
9299        // Map coordinate system from active array to sensor output.
9300        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9301                scalerCropRegion.width, scalerCropRegion.height);
9302
9303        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9304                scalerCropRegion)) {
9305            rc = BAD_VALUE;
9306        }
9307        scalerCropSet = true;
9308    }
9309
9310    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9311        int64_t sensorExpTime =
9312                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9313        LOGD("setting sensorExpTime %lld", sensorExpTime);
9314        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9315                sensorExpTime)) {
9316            rc = BAD_VALUE;
9317        }
9318    }
9319
9320    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9321        int64_t sensorFrameDuration =
9322                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9323        int64_t minFrameDuration = getMinFrameDuration(request);
9324        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9325        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9326            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9327        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9328        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9329                sensorFrameDuration)) {
9330            rc = BAD_VALUE;
9331        }
9332    }
9333
9334    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9335        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9336        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9337                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9338        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9339                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9340        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9341        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9342                sensorSensitivity)) {
9343            rc = BAD_VALUE;
9344        }
9345    }
9346
9347    if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9348        int32_t ispSensitivity =
9349            frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9350        if (ispSensitivity <
9351            gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9352                ispSensitivity =
9353                    gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9354                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9355        }
9356        if (ispSensitivity >
9357            gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9358                ispSensitivity =
9359                    gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9360                LOGD("clamp ispSensitivity to %d", ispSensitivity);
9361        }
9362        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9363                ispSensitivity)) {
9364            rc = BAD_VALUE;
9365        }
9366    }
9367
9368    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9369        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9370        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9371            rc = BAD_VALUE;
9372        }
9373    }
9374
9375    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9376        uint8_t fwk_facedetectMode =
9377                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9378
9379        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9380                fwk_facedetectMode);
9381
9382        if (NAME_NOT_FOUND != val) {
9383            uint8_t facedetectMode = (uint8_t)val;
9384            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9385                    facedetectMode)) {
9386                rc = BAD_VALUE;
9387            }
9388        }
9389    }
9390
9391    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9392        uint8_t histogramMode =
9393                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9394        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9395                histogramMode)) {
9396            rc = BAD_VALUE;
9397        }
9398    }
9399
9400    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9401        uint8_t sharpnessMapMode =
9402                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9403        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9404                sharpnessMapMode)) {
9405            rc = BAD_VALUE;
9406        }
9407    }
9408
9409    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9410        uint8_t tonemapMode =
9411                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9412        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9413            rc = BAD_VALUE;
9414        }
9415    }
9416    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9417    /*All tonemap channels will have the same number of points*/
9418    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9419        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9420        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9421        cam_rgb_tonemap_curves tonemapCurves;
9422        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9423        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9424            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9425                     tonemapCurves.tonemap_points_cnt,
9426                    CAM_MAX_TONEMAP_CURVE_SIZE);
9427            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9428        }
9429
9430        /* ch0 = G*/
9431        size_t point = 0;
9432        cam_tonemap_curve_t tonemapCurveGreen;
9433        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9434            for (size_t j = 0; j < 2; j++) {
9435               tonemapCurveGreen.tonemap_points[i][j] =
9436                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9437               point++;
9438            }
9439        }
9440        tonemapCurves.curves[0] = tonemapCurveGreen;
9441
9442        /* ch 1 = B */
9443        point = 0;
9444        cam_tonemap_curve_t tonemapCurveBlue;
9445        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9446            for (size_t j = 0; j < 2; j++) {
9447               tonemapCurveBlue.tonemap_points[i][j] =
9448                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9449               point++;
9450            }
9451        }
9452        tonemapCurves.curves[1] = tonemapCurveBlue;
9453
9454        /* ch 2 = R */
9455        point = 0;
9456        cam_tonemap_curve_t tonemapCurveRed;
9457        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9458            for (size_t j = 0; j < 2; j++) {
9459               tonemapCurveRed.tonemap_points[i][j] =
9460                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9461               point++;
9462            }
9463        }
9464        tonemapCurves.curves[2] = tonemapCurveRed;
9465
9466        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9467                tonemapCurves)) {
9468            rc = BAD_VALUE;
9469        }
9470    }
9471
9472    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9473        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9474        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9475                captureIntent)) {
9476            rc = BAD_VALUE;
9477        }
9478    }
9479
9480    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9481        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9482        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9483                blackLevelLock)) {
9484            rc = BAD_VALUE;
9485        }
9486    }
9487
9488    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9489        uint8_t lensShadingMapMode =
9490                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9491        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9492                lensShadingMapMode)) {
9493            rc = BAD_VALUE;
9494        }
9495    }
9496
9497    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9498        cam_area_t roi;
9499        bool reset = true;
9500        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9501
9502        // Map coordinate system from active array to sensor output.
9503        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9504                roi.rect.height);
9505
9506        if (scalerCropSet) {
9507            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9508        }
9509        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9510            rc = BAD_VALUE;
9511        }
9512    }
9513
9514    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9515        cam_area_t roi;
9516        bool reset = true;
9517        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9518
9519        // Map coordinate system from active array to sensor output.
9520        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9521                roi.rect.height);
9522
9523        if (scalerCropSet) {
9524            reset = resetIfNeededROI(&roi, &scalerCropRegion);
9525        }
9526        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9527            rc = BAD_VALUE;
9528        }
9529    }
9530
9531    // CDS for non-HFR non-video mode
9532    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9533            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9534        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9535        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9536            LOGE("Invalid CDS mode %d!", *fwk_cds);
9537        } else {
9538            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9539                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9540                rc = BAD_VALUE;
9541            }
9542        }
9543    }
9544
9545    // TNR
9546    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9547        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9548        uint8_t b_TnrRequested = 0;
9549        cam_denoise_param_t tnr;
9550        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9551        tnr.process_plates =
9552            (cam_denoise_process_type_t)frame_settings.find(
9553            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9554        b_TnrRequested = tnr.denoise_enable;
9555        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9556            rc = BAD_VALUE;
9557        }
9558    }
9559
9560    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9561        int32_t fwk_testPatternMode =
9562                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9563        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9564                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9565
9566        if (NAME_NOT_FOUND != testPatternMode) {
9567            cam_test_pattern_data_t testPatternData;
9568            memset(&testPatternData, 0, sizeof(testPatternData));
9569            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9570            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9571                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9572                int32_t *fwk_testPatternData =
9573                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9574                testPatternData.r = fwk_testPatternData[0];
9575                testPatternData.b = fwk_testPatternData[3];
9576                switch (gCamCapability[mCameraId]->color_arrangement) {
9577                    case CAM_FILTER_ARRANGEMENT_RGGB:
9578                    case CAM_FILTER_ARRANGEMENT_GRBG:
9579                        testPatternData.gr = fwk_testPatternData[1];
9580                        testPatternData.gb = fwk_testPatternData[2];
9581                        break;
9582                    case CAM_FILTER_ARRANGEMENT_GBRG:
9583                    case CAM_FILTER_ARRANGEMENT_BGGR:
9584                        testPatternData.gr = fwk_testPatternData[2];
9585                        testPatternData.gb = fwk_testPatternData[1];
9586                        break;
9587                    default:
9588                        LOGE("color arrangement %d is not supported",
9589                                gCamCapability[mCameraId]->color_arrangement);
9590                        break;
9591                }
9592            }
9593            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9594                    testPatternData)) {
9595                rc = BAD_VALUE;
9596            }
9597        } else {
9598            LOGE("Invalid framework sensor test pattern mode %d",
9599                    fwk_testPatternMode);
9600        }
9601    }
9602
9603    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9604        size_t count = 0;
9605        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9606        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9607                gps_coords.data.d, gps_coords.count, count);
9608        if (gps_coords.count != count) {
9609            rc = BAD_VALUE;
9610        }
9611    }
9612
9613    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9614        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9615        size_t count = 0;
9616        const char *gps_methods_src = (const char *)
9617                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9618        memset(gps_methods, '\0', sizeof(gps_methods));
9619        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9620        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9621                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9622        if (GPS_PROCESSING_METHOD_SIZE != count) {
9623            rc = BAD_VALUE;
9624        }
9625    }
9626
9627    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9628        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9629        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9630                gps_timestamp)) {
9631            rc = BAD_VALUE;
9632        }
9633    }
9634
9635    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9636        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9637        cam_rotation_info_t rotation_info;
9638        if (orientation == 0) {
9639           rotation_info.rotation = ROTATE_0;
9640        } else if (orientation == 90) {
9641           rotation_info.rotation = ROTATE_90;
9642        } else if (orientation == 180) {
9643           rotation_info.rotation = ROTATE_180;
9644        } else if (orientation == 270) {
9645           rotation_info.rotation = ROTATE_270;
9646        }
9647        rotation_info.streamId = snapshotStreamId;
9648        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9649        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9650            rc = BAD_VALUE;
9651        }
9652    }
9653
9654    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9655        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9656        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9657            rc = BAD_VALUE;
9658        }
9659    }
9660
9661    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9662        uint32_t thumb_quality = (uint32_t)
9663                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9664        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9665                thumb_quality)) {
9666            rc = BAD_VALUE;
9667        }
9668    }
9669
9670    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9671        cam_dimension_t dim;
9672        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9673        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9674        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9675            rc = BAD_VALUE;
9676        }
9677    }
9678
9679    // Internal metadata
9680    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9681        size_t count = 0;
9682        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9683        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9684                privatedata.data.i32, privatedata.count, count);
9685        if (privatedata.count != count) {
9686            rc = BAD_VALUE;
9687        }
9688    }
9689
9690    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
9691        uint8_t* use_av_timer =
9692                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
9693        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
9694            rc = BAD_VALUE;
9695        }
9696    }
9697
9698    // EV step
9699    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9700            gCamCapability[mCameraId]->exp_compensation_step)) {
9701        rc = BAD_VALUE;
9702    }
9703
9704    // CDS info
9705    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9706        cam_cds_data_t *cdsData = (cam_cds_data_t *)
9707                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9708
9709        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9710                CAM_INTF_META_CDS_DATA, *cdsData)) {
9711            rc = BAD_VALUE;
9712        }
9713    }
9714
9715    // Hybrid AE
9716    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
9717        uint8_t *hybrid_ae = (uint8_t *)
9718                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
9719
9720        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9721                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
9722            rc = BAD_VALUE;
9723        }
9724    }
9725
9726    return rc;
9727}
9728
9729/*===========================================================================
9730 * FUNCTION   : captureResultCb
9731 *
9732 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9733 *
9734 * PARAMETERS :
9735 *   @frame  : frame information from mm-camera-interface
9736 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9737 *   @userdata: userdata
9738 *
9739 * RETURN     : NONE
9740 *==========================================================================*/
9741void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9742                camera3_stream_buffer_t *buffer,
9743                uint32_t frame_number, bool isInputBuffer, void *userdata)
9744{
9745    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9746    if (hw == NULL) {
9747        LOGE("Invalid hw %p", hw);
9748        return;
9749    }
9750
9751    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9752    return;
9753}
9754
9755
9756/*===========================================================================
9757 * FUNCTION   : initialize
9758 *
9759 * DESCRIPTION: Pass framework callback pointers to HAL
9760 *
9761 * PARAMETERS :
9762 *
9763 *
9764 * RETURN     : Success : 0
9765 *              Failure: -ENODEV
9766 *==========================================================================*/
9767
9768int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9769                                  const camera3_callback_ops_t *callback_ops)
9770{
9771    LOGD("E");
9772    QCamera3HardwareInterface *hw =
9773        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9774    if (!hw) {
9775        LOGE("NULL camera device");
9776        return -ENODEV;
9777    }
9778
9779    int rc = hw->initialize(callback_ops);
9780    LOGD("X");
9781    return rc;
9782}
9783
9784/*===========================================================================
9785 * FUNCTION   : configure_streams
9786 *
9787 * DESCRIPTION:
9788 *
9789 * PARAMETERS :
9790 *
9791 *
9792 * RETURN     : Success: 0
9793 *              Failure: -EINVAL (if stream configuration is invalid)
9794 *                       -ENODEV (fatal error)
9795 *==========================================================================*/
9796
9797int QCamera3HardwareInterface::configure_streams(
9798        const struct camera3_device *device,
9799        camera3_stream_configuration_t *stream_list)
9800{
9801    LOGD("E");
9802    QCamera3HardwareInterface *hw =
9803        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9804    if (!hw) {
9805        LOGE("NULL camera device");
9806        return -ENODEV;
9807    }
9808    int rc = hw->configureStreams(stream_list);
9809    LOGD("X");
9810    return rc;
9811}
9812
9813/*===========================================================================
9814 * FUNCTION   : construct_default_request_settings
9815 *
9816 * DESCRIPTION: Configure a settings buffer to meet the required use case
9817 *
9818 * PARAMETERS :
9819 *
9820 *
9821 * RETURN     : Success: Return valid metadata
9822 *              Failure: Return NULL
9823 *==========================================================================*/
9824const camera_metadata_t* QCamera3HardwareInterface::
9825    construct_default_request_settings(const struct camera3_device *device,
9826                                        int type)
9827{
9828
9829    LOGD("E");
9830    camera_metadata_t* fwk_metadata = NULL;
9831    QCamera3HardwareInterface *hw =
9832        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9833    if (!hw) {
9834        LOGE("NULL camera device");
9835        return NULL;
9836    }
9837
9838    fwk_metadata = hw->translateCapabilityToMetadata(type);
9839
9840    LOGD("X");
9841    return fwk_metadata;
9842}
9843
9844/*===========================================================================
9845 * FUNCTION   : process_capture_request
9846 *
9847 * DESCRIPTION:
9848 *
9849 * PARAMETERS :
9850 *
9851 *
9852 * RETURN     :
9853 *==========================================================================*/
9854int QCamera3HardwareInterface::process_capture_request(
9855                    const struct camera3_device *device,
9856                    camera3_capture_request_t *request)
9857{
9858    LOGD("E");
9859    QCamera3HardwareInterface *hw =
9860        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9861    if (!hw) {
9862        LOGE("NULL camera device");
9863        return -EINVAL;
9864    }
9865
9866    int rc = hw->processCaptureRequest(request);
9867    LOGD("X");
9868    return rc;
9869}
9870
9871/*===========================================================================
9872 * FUNCTION   : dump
9873 *
9874 * DESCRIPTION:
9875 *
9876 * PARAMETERS :
9877 *
9878 *
9879 * RETURN     :
9880 *==========================================================================*/
9881
9882void QCamera3HardwareInterface::dump(
9883                const struct camera3_device *device, int fd)
9884{
9885    /* Log level property is read when "adb shell dumpsys media.camera" is
9886       called so that the log level can be controlled without restarting
9887       the media server */
9888    getLogLevel();
9889
9890    LOGD("E");
9891    QCamera3HardwareInterface *hw =
9892        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9893    if (!hw) {
9894        LOGE("NULL camera device");
9895        return;
9896    }
9897
9898    hw->dump(fd);
9899    LOGD("X");
9900    return;
9901}
9902
9903/*===========================================================================
9904 * FUNCTION   : flush
9905 *
9906 * DESCRIPTION:
9907 *
9908 * PARAMETERS :
9909 *
9910 *
9911 * RETURN     :
9912 *==========================================================================*/
9913
int QCamera3HardwareInterface::flush(
                const struct camera3_device *device)
{
    int rc;
    LOGD("E");
    QCamera3HardwareInterface *hw =
        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
    if (!hw) {
        LOGE("NULL camera device");
        return -EINVAL;
    }

    // Check the HAL state machine under lock; only a STARTED session
    // actually performs a flush.
    pthread_mutex_lock(&hw->mMutex);
    // Validate current state
    switch (hw->mState) {
        case STARTED:
            /* valid state */
            break;

        case ERROR:
            // Drop the lock before invoking the error handler —
            // presumably it re-enters code that acquires mMutex (TODO confirm).
            pthread_mutex_unlock(&hw->mMutex);
            hw->handleCameraDeviceError();
            return -ENODEV;

        default:
            // Any other state: flush is a no-op reported as success.
            LOGI("Flush returned during state %d", hw->mState);
            pthread_mutex_unlock(&hw->mMutex);
            return 0;
    }
    // Flush itself runs without holding mMutex.
    pthread_mutex_unlock(&hw->mMutex);

    rc = hw->flush(true /* restart channels */ );
    LOGD("X");
    return rc;
}
9949
9950/*===========================================================================
9951 * FUNCTION   : close_camera_device
9952 *
9953 * DESCRIPTION:
9954 *
9955 * PARAMETERS :
9956 *
9957 *
9958 * RETURN     :
9959 *==========================================================================*/
9960int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9961{
9962    int ret = NO_ERROR;
9963    QCamera3HardwareInterface *hw =
9964        reinterpret_cast<QCamera3HardwareInterface *>(
9965            reinterpret_cast<camera3_device_t *>(device)->priv);
9966    if (!hw) {
9967        LOGE("NULL camera device");
9968        return BAD_VALUE;
9969    }
9970
9971    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
9972    delete hw;
9973    LOGI("[KPI Perf]: X");
9974    return ret;
9975}
9976
9977/*===========================================================================
9978 * FUNCTION   : getWaveletDenoiseProcessPlate
9979 *
9980 * DESCRIPTION: query wavelet denoise process plate
9981 *
9982 * PARAMETERS : None
9983 *
9984 * RETURN     : WNR prcocess plate value
9985 *==========================================================================*/
9986cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9987{
9988    char prop[PROPERTY_VALUE_MAX];
9989    memset(prop, 0, sizeof(prop));
9990    property_get("persist.denoise.process.plates", prop, "0");
9991    int processPlate = atoi(prop);
9992    switch(processPlate) {
9993    case 0:
9994        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9995    case 1:
9996        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9997    case 2:
9998        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9999    case 3:
10000        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10001    default:
10002        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10003    }
10004}
10005
10006
10007/*===========================================================================
10008 * FUNCTION   : getTemporalDenoiseProcessPlate
10009 *
10010 * DESCRIPTION: query temporal denoise process plate
10011 *
10012 * PARAMETERS : None
10013 *
10014 * RETURN     : TNR prcocess plate value
10015 *==========================================================================*/
10016cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10017{
10018    char prop[PROPERTY_VALUE_MAX];
10019    memset(prop, 0, sizeof(prop));
10020    property_get("persist.tnr.process.plates", prop, "0");
10021    int processPlate = atoi(prop);
10022    switch(processPlate) {
10023    case 0:
10024        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10025    case 1:
10026        return CAM_WAVELET_DENOISE_CBCR_ONLY;
10027    case 2:
10028        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10029    case 3:
10030        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10031    default:
10032        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10033    }
10034}
10035
10036
10037/*===========================================================================
10038 * FUNCTION   : extractSceneMode
10039 *
10040 * DESCRIPTION: Extract scene mode from frameworks set metadata
10041 *
10042 * PARAMETERS :
10043 *      @frame_settings: CameraMetadata reference
10044 *      @metaMode: ANDROID_CONTORL_MODE
10045 *      @hal_metadata: hal metadata structure
10046 *
10047 * RETURN     : None
10048 *==========================================================================*/
10049int32_t QCamera3HardwareInterface::extractSceneMode(
10050        const CameraMetadata &frame_settings, uint8_t metaMode,
10051        metadata_buffer_t *hal_metadata)
10052{
10053    int32_t rc = NO_ERROR;
10054
10055    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10056        camera_metadata_ro_entry entry =
10057                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10058        if (0 == entry.count)
10059            return rc;
10060
10061        uint8_t fwk_sceneMode = entry.data.u8[0];
10062
10063        int val = lookupHalName(SCENE_MODES_MAP,
10064                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10065                fwk_sceneMode);
10066        if (NAME_NOT_FOUND != val) {
10067            uint8_t sceneMode = (uint8_t)val;
10068            LOGD("sceneMode: %d", sceneMode);
10069            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10070                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10071                rc = BAD_VALUE;
10072            }
10073        }
10074    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10075            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10076        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10077        LOGD("sceneMode: %d", sceneMode);
10078        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10079                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10080            rc = BAD_VALUE;
10081        }
10082    }
10083    return rc;
10084}
10085
10086/*===========================================================================
10087 * FUNCTION   : needRotationReprocess
10088 *
10089 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10090 *
10091 * PARAMETERS : none
10092 *
10093 * RETURN     : true: needed
10094 *              false: no need
10095 *==========================================================================*/
10096bool QCamera3HardwareInterface::needRotationReprocess()
10097{
10098    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10099        // current rotation is not zero, and pp has the capability to process rotation
10100        LOGH("need do reprocess for rotation");
10101        return true;
10102    }
10103
10104    return false;
10105}
10106
10107/*===========================================================================
10108 * FUNCTION   : needReprocess
10109 *
10110 * DESCRIPTION: if reprocess in needed
10111 *
10112 * PARAMETERS : none
10113 *
10114 * RETURN     : true: needed
10115 *              false: no need
10116 *==========================================================================*/
10117bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10118{
10119    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10120        // TODO: add for ZSL HDR later
10121        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10122        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10123            LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10124            return true;
10125        } else {
10126            LOGH("already post processed frame");
10127            return false;
10128        }
10129    }
10130    return needRotationReprocess();
10131}
10132
10133/*===========================================================================
10134 * FUNCTION   : needJpegExifRotation
10135 *
10136 * DESCRIPTION: if rotation from jpeg is needed
10137 *
10138 * PARAMETERS : none
10139 *
10140 * RETURN     : true: needed
10141 *              false: no need
10142 *==========================================================================*/
10143bool QCamera3HardwareInterface::needJpegExifRotation()
10144{
10145   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10146    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10147       LOGD("Need use Jpeg EXIF Rotation");
10148       return true;
10149    }
10150    return false;
10151}
10152
10153/*===========================================================================
10154 * FUNCTION   : addOfflineReprocChannel
10155 *
10156 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10157 *              coming from input channel
10158 *
10159 * PARAMETERS :
10160 *   @config  : reprocess configuration
10161 *   @inputChHandle : pointer to the input (source) channel
10162 *
10163 *
10164 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
10165 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel bound to the source (input) channel.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    // Channel must be initialized before streams can be attached;
    // on failure the partially-built channel is released here.
    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop the rotation bit when the hardware cannot rotate in PP.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Mirror the source channel's streams onto the reprocess channel.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
10213
10214/*===========================================================================
10215 * FUNCTION   : getMobicatMask
10216 *
10217 * DESCRIPTION: returns mobicat mask
10218 *
10219 * PARAMETERS : none
10220 *
10221 * RETURN     : mobicat mask
10222 *
10223 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Cached value last written by setMobicat() from persist.camera.mobicat.
    return m_MobicatMask;
}
10228
10229/*===========================================================================
10230 * FUNCTION   : setMobicat
10231 *
10232 * DESCRIPTION: set Mobicat on/off.
10233 *
10234 * PARAMETERS :
10235 *   @params  : none
10236 *
10237 * RETURN     : int32_t type of status
10238 *              NO_ERROR  -- success
10239 *              none-zero failure code
10240 *==========================================================================*/
10241int32_t QCamera3HardwareInterface::setMobicat()
10242{
10243    char value [PROPERTY_VALUE_MAX];
10244    property_get("persist.camera.mobicat", value, "0");
10245    int32_t ret = NO_ERROR;
10246    uint8_t enableMobi = (uint8_t)atoi(value);
10247
10248    if (enableMobi) {
10249        tune_cmd_t tune_cmd;
10250        tune_cmd.type = SET_RELOAD_CHROMATIX;
10251        tune_cmd.module = MODULE_ALL;
10252        tune_cmd.value = TRUE;
10253        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10254                CAM_INTF_PARM_SET_VFE_COMMAND,
10255                tune_cmd);
10256
10257        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10258                CAM_INTF_PARM_SET_PP_COMMAND,
10259                tune_cmd);
10260    }
10261    m_MobicatMask = enableMobi;
10262
10263    return ret;
10264}
10265
10266/*===========================================================================
10267* FUNCTION   : getLogLevel
10268*
10269* DESCRIPTION: Reads the log level property into a variable
10270*
10271* PARAMETERS :
10272*   None
10273*
10274* RETURN     :
10275*   None
10276*==========================================================================*/
10277void QCamera3HardwareInterface::getLogLevel()
10278{
10279    char prop[PROPERTY_VALUE_MAX];
10280    uint32_t globalLogLevel = 0;
10281
10282    property_get("persist.camera.hal.debug", prop, "0");
10283    int val = atoi(prop);
10284    if (0 <= val) {
10285        gCamHal3LogLevel = (uint32_t)val;
10286    }
10287
10288    property_get("persist.camera.kpi.debug", prop, "1");
10289    gKpiDebugLevel = atoi(prop);
10290
10291    property_get("persist.camera.global.debug", prop, "0");
10292    val = atoi(prop);
10293    if (0 <= val) {
10294        globalLogLevel = (uint32_t)val;
10295    }
10296
10297    /* Highest log level among hal.logs and global.logs is selected */
10298    if (gCamHal3LogLevel < globalLogLevel)
10299        gCamHal3LogLevel = globalLogLevel;
10300
10301    return;
10302}
10303
10304/*===========================================================================
10305 * FUNCTION   : validateStreamRotations
10306 *
10307 * DESCRIPTION: Check if the rotations requested are supported
10308 *
10309 * PARAMETERS :
10310 *   @stream_list : streams to be configured
10311 *
10312 * RETURN     : NO_ERROR on success
10313 *              -EINVAL on failure
10314 *
10315 *==========================================================================*/
10316int QCamera3HardwareInterface::validateStreamRotations(
10317        camera3_stream_configuration_t *streamList)
10318{
10319    int rc = NO_ERROR;
10320
10321    /*
10322    * Loop through all streams requested in configuration
10323    * Check if unsupported rotations have been requested on any of them
10324    */
10325    for (size_t j = 0; j < streamList->num_streams; j++){
10326        camera3_stream_t *newStream = streamList->streams[j];
10327
10328        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10329        bool isImplDef = (newStream->format ==
10330                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10331        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10332                isImplDef);
10333
10334        if (isRotated && (!isImplDef || isZsl)) {
10335            LOGE("Error: Unsupported rotation of %d requested for stream"
10336                    "type:%d and stream format:%d",
10337                    newStream->rotation, newStream->stream_type,
10338                    newStream->format);
10339            rc = -EINVAL;
10340            break;
10341        }
10342    }
10343
10344    return rc;
10345}
10346
10347/*===========================================================================
10348* FUNCTION   : getFlashInfo
10349*
10350* DESCRIPTION: Retrieve information about whether the device has a flash.
10351*
10352* PARAMETERS :
10353*   @cameraId  : Camera id to query
10354*   @hasFlash  : Boolean indicating whether there is a flash device
10355*                associated with given camera
10356*   @flashNode : If a flash device exists, this will be its device node.
10357*
10358* RETURN     :
10359*   None
10360*==========================================================================*/
10361void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10362        bool& hasFlash,
10363        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10364{
10365    cam_capability_t* camCapability = gCamCapability[cameraId];
10366    if (NULL == camCapability) {
10367        hasFlash = false;
10368        flashNode[0] = '\0';
10369    } else {
10370        hasFlash = camCapability->flash_available;
10371        strlcpy(flashNode,
10372                (char*)camCapability->flash_dev_name,
10373                QCAMERA_MAX_FILEPATH_LENGTH);
10374    }
10375}
10376
10377/*===========================================================================
10378* FUNCTION   : getEepromVersionInfo
10379*
10380* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10381*
10382* PARAMETERS : None
10383*
10384* RETURN     : string describing EEPROM version
10385*              "\0" if no such info available
10386*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Points into the static capability table for this camera; per the
    // header comment it holds "\0" when no EEPROM version is available.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
10391
10392/*===========================================================================
10393* FUNCTION   : getLdafCalib
10394*
10395* DESCRIPTION: Retrieve Laser AF calibration data
10396*
10397* PARAMETERS : None
10398*
10399* RETURN     : Two uint32_t describing laser AF calibration data
10400*              NULL if none is available.
10401*==========================================================================*/
10402const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10403{
10404    if (mLdafCalibExist) {
10405        return &mLdafCalib[0];
10406    } else {
10407        return NULL;
10408    }
10409}
10410
10411/*===========================================================================
10412 * FUNCTION   : dynamicUpdateMetaStreamInfo
10413 *
10414 * DESCRIPTION: This function:
10415 *             (1) stops all the channels
10416 *             (2) returns error on pending requests and buffers
10417 *             (3) sends metastream_info in setparams
10418 *             (4) starts all channels
10419 *             This is useful when sensor has to be restarted to apply any
10420 *             settings such as frame rate from a different sensor mode
10421 *
10422 * PARAMETERS : None
10423 *
10424 * RETURN     : NO_ERROR on success
10425 *              Error codes on failure
10426 *
10427 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off everything before touching sensor settings.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: fail any in-flight requests/buffers back to the framework,
    // since their captures will never complete after the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        // NOTE(review): "%x" is used for postprocess_mask — if
        // cam_feature_mask_t is wider than int this truncates; confirm.
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: re-send the stream configuration so the new sensor mode
    // (e.g. a different frame rate) takes effect.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Deliberately non-fatal: channels are still restarted below.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 4: stream-on everything again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
10475
10476/*===========================================================================
10477 * FUNCTION   : stopAllChannels
10478 *
10479 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10480 *
10481 * PARAMETERS : None
10482 *
10483 * RETURN     : NO_ERROR on success
10484 *              Error codes on failure
10485 *
10486 *==========================================================================*/
10487int32_t QCamera3HardwareInterface::stopAllChannels()
10488{
10489    int32_t rc = NO_ERROR;
10490
10491    LOGD("Stopping all channels");
10492    // Stop the Streams/Channels
10493    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10494        it != mStreamInfo.end(); it++) {
10495        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10496        if (channel) {
10497            channel->stop();
10498        }
10499        (*it)->status = INVALID;
10500    }
10501
10502    if (mSupportChannel) {
10503        mSupportChannel->stop();
10504    }
10505    if (mAnalysisChannel) {
10506        mAnalysisChannel->stop();
10507    }
10508    if (mRawDumpChannel) {
10509        mRawDumpChannel->stop();
10510    }
10511    if (mMetadataChannel) {
10512        /* If content of mStreamInfo is not 0, there is metadata stream */
10513        mMetadataChannel->stop();
10514    }
10515
10516    LOGD("All channels stopped");
10517    return rc;
10518}
10519
10520/*===========================================================================
10521 * FUNCTION   : startAllChannels
10522 *
10523 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10524 *
10525 * PARAMETERS : None
10526 *
10527 * RETURN     : NO_ERROR on success
10528 *              Error codes on failure
10529 *
10530 *==========================================================================*/
10531int32_t QCamera3HardwareInterface::startAllChannels()
10532{
10533    int32_t rc = NO_ERROR;
10534
10535    LOGD("Start all channels ");
10536    // Start the Streams/Channels
10537    if (mMetadataChannel) {
10538        /* If content of mStreamInfo is not 0, there is metadata stream */
10539        rc = mMetadataChannel->start();
10540        if (rc < 0) {
10541            LOGE("META channel start failed");
10542            return rc;
10543        }
10544    }
10545    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10546        it != mStreamInfo.end(); it++) {
10547        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10548        if (channel) {
10549            rc = channel->start();
10550            if (rc < 0) {
10551                LOGE("channel start failed");
10552                return rc;
10553            }
10554        }
10555    }
10556    if (mAnalysisChannel) {
10557        mAnalysisChannel->start();
10558    }
10559    if (mSupportChannel) {
10560        rc = mSupportChannel->start();
10561        if (rc < 0) {
10562            LOGE("Support channel start failed");
10563            return rc;
10564        }
10565    }
10566    if (mRawDumpChannel) {
10567        rc = mRawDumpChannel->start();
10568        if (rc < 0) {
10569            LOGE("RAW dump channel start failed");
10570            return rc;
10571        }
10572    }
10573
10574    LOGD("All channels started");
10575    return rc;
10576}
10577
10578/*===========================================================================
10579 * FUNCTION   : notifyErrorForPendingRequests
10580 *
10581 * DESCRIPTION: This function sends error for all the pending requests/buffers
10582 *
10583 * PARAMETERS : None
10584 *
10585 * RETURN     : Error codes
10586 *              NO_ERROR on success
10587 *
10588 *==========================================================================*/
10589int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
10590{
10591    int32_t rc = NO_ERROR;
10592    unsigned int frameNum = 0;
10593    camera3_capture_result_t result;
10594    camera3_stream_buffer_t *pStream_Buf = NULL;
10595
10596    memset(&result, 0, sizeof(camera3_capture_result_t));
10597
10598    if (mPendingRequestsList.size() > 0) {
10599        pendingRequestIterator i = mPendingRequestsList.begin();
10600        frameNum = i->frame_number;
10601    } else {
10602        /* There might still be pending buffers even though there are
10603         no pending requests. Setting the frameNum to MAX so that
10604         all the buffers with smaller frame numbers are returned */
10605        frameNum = UINT_MAX;
10606    }
10607
10608    LOGH("Oldest frame num on mPendingRequestsList = %u",
10609       frameNum);
10610
10611    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
10612            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
10613
10614        if (req->frame_number < frameNum) {
10615            // Send Error notify to frameworks for each buffer for which
10616            // metadata buffer is already sent
10617            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
10618                req->frame_number, req->mPendingBufferList.size());
10619
10620            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10621            if (NULL == pStream_Buf) {
10622                LOGE("No memory for pending buffers array");
10623                return NO_MEMORY;
10624            }
10625            memset(pStream_Buf, 0,
10626                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10627            result.result = NULL;
10628            result.frame_number = req->frame_number;
10629            result.num_output_buffers = req->mPendingBufferList.size();
10630            result.output_buffers = pStream_Buf;
10631
10632            size_t index = 0;
10633            for (auto info = req->mPendingBufferList.begin();
10634                info != req->mPendingBufferList.end(); ) {
10635
10636                camera3_notify_msg_t notify_msg;
10637                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10638                notify_msg.type = CAMERA3_MSG_ERROR;
10639                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
10640                notify_msg.message.error.error_stream = info->stream;
10641                notify_msg.message.error.frame_number = req->frame_number;
10642                pStream_Buf[index].acquire_fence = -1;
10643                pStream_Buf[index].release_fence = -1;
10644                pStream_Buf[index].buffer = info->buffer;
10645                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10646                pStream_Buf[index].stream = info->stream;
10647                mCallbackOps->notify(mCallbackOps, &notify_msg);
10648                index++;
10649                // Remove buffer from list
10650                info = req->mPendingBufferList.erase(info);
10651            }
10652
10653            // Remove this request from Map
10654            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10655                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10656            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10657
10658            mCallbackOps->process_capture_result(mCallbackOps, &result);
10659
10660            delete [] pStream_Buf;
10661        } else {
10662
10663            // Go through the pending requests info and send error request to framework
10664            LOGE("Sending ERROR REQUEST for all pending requests");
10665            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
10666
10667            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);
10668
10669            // Send error notify to frameworks
10670            camera3_notify_msg_t notify_msg;
10671            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10672            notify_msg.type = CAMERA3_MSG_ERROR;
10673            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
10674            notify_msg.message.error.error_stream = NULL;
10675            notify_msg.message.error.frame_number = req->frame_number;
10676            mCallbackOps->notify(mCallbackOps, &notify_msg);
10677
10678            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10679            if (NULL == pStream_Buf) {
10680                LOGE("No memory for pending buffers array");
10681                return NO_MEMORY;
10682            }
10683            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10684
10685            result.result = NULL;
10686            result.frame_number = req->frame_number;
10687            result.input_buffer = i->input_buffer;
10688            result.num_output_buffers = req->mPendingBufferList.size();
10689            result.output_buffers = pStream_Buf;
10690
10691            size_t index = 0;
10692            for (auto info = req->mPendingBufferList.begin();
10693                info != req->mPendingBufferList.end(); ) {
10694                pStream_Buf[index].acquire_fence = -1;
10695                pStream_Buf[index].release_fence = -1;
10696                pStream_Buf[index].buffer = info->buffer;
10697                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10698                pStream_Buf[index].stream = info->stream;
10699                index++;
10700                // Remove buffer from list
10701                info = req->mPendingBufferList.erase(info);
10702            }
10703
10704            // Remove this request from Map
10705            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10706                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10707            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10708
10709            mCallbackOps->process_capture_result(mCallbackOps, &result);
10710            delete [] pStream_Buf;
10711            i = erasePendingRequest(i);
10712        }
10713    }
10714
10715    /* Reset pending frame Drop list and requests list */
10716    mPendingFrameDropList.clear();
10717
10718    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
10719        req.mPendingBufferList.clear();
10720    }
10721    mPendingBuffersMap.mPendingBuffersInRequest.clear();
10722    mPendingReprocessResultList.clear();
10723    LOGH("Cleared all the pending buffers ");
10724
10725    return rc;
10726}
10727
10728bool QCamera3HardwareInterface::isOnEncoder(
10729        const cam_dimension_t max_viewfinder_size,
10730        uint32_t width, uint32_t height)
10731{
10732    return (width > (uint32_t)max_viewfinder_size.width ||
10733            height > (uint32_t)max_viewfinder_size.height);
10734}
10735
10736/*===========================================================================
10737 * FUNCTION   : setBundleInfo
10738 *
 * DESCRIPTION: Set bundle info for all streams that are bundled.
10740 *
10741 * PARAMETERS : None
10742 *
10743 * RETURN     : NO_ERROR on success
10744 *              Error codes on failure
10745 *==========================================================================*/
10746int32_t QCamera3HardwareInterface::setBundleInfo()
10747{
10748    int32_t rc = NO_ERROR;
10749
10750    if (mChannelHandle) {
10751        cam_bundle_config_t bundleInfo;
10752        memset(&bundleInfo, 0, sizeof(bundleInfo));
10753        rc = mCameraHandle->ops->get_bundle_info(
10754                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10755        if (rc != NO_ERROR) {
10756            LOGE("get_bundle_info failed");
10757            return rc;
10758        }
10759        if (mAnalysisChannel) {
10760            mAnalysisChannel->setBundleInfo(bundleInfo);
10761        }
10762        if (mSupportChannel) {
10763            mSupportChannel->setBundleInfo(bundleInfo);
10764        }
10765        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10766                it != mStreamInfo.end(); it++) {
10767            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10768            channel->setBundleInfo(bundleInfo);
10769        }
10770        if (mRawDumpChannel) {
10771            mRawDumpChannel->setBundleInfo(bundleInfo);
10772        }
10773    }
10774
10775    return rc;
10776}
10777
10778/*===========================================================================
10779 * FUNCTION   : get_num_overall_buffers
10780 *
 * DESCRIPTION: Calculate the total number of pending buffers across all requests.
10782 *
10783 * PARAMETERS : None
10784 *
10785 * RETURN     : Number of overall pending buffers
10786 *
10787 *==========================================================================*/
10788uint32_t PendingBuffersMap::get_num_overall_buffers()
10789{
10790    uint32_t sum_buffers = 0;
10791    for (auto &req : mPendingBuffersInRequest) {
10792        sum_buffers += req.mPendingBufferList.size();
10793    }
10794    return sum_buffers;
10795}
10796
10797/*===========================================================================
10798 * FUNCTION   : removeBuf
10799 *
10800 * DESCRIPTION: Remove a matching buffer from tracker.
10801 *
10802 * PARAMETERS : @buffer: image buffer for the callback
10803 *
10804 * RETURN     : None
10805 *
10806 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Scan every pending request for the first tracker entry whose handle
    // matches 'buffer', and remove that single entry from the tracker.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // erase() invalidates k; capture the returned successor even
                // though we break out of the inner loop right after.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    // (last buffer of the request was just removed; 'req' is
                    // re-seated on the successor, and we break before the
                    // outer loop's req++ could skip an element)
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        // Only the first matching buffer is removed; stop searching.
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
10833
10834/*===========================================================================
10835 * FUNCTION   : setPAAFSupport
10836 *
10837 * DESCRIPTION: Set the preview-assisted auto focus support bit in
10838 *              feature mask according to stream type and filter
10839 *              arrangement
10840 *
10841 * PARAMETERS : @feature_mask: current feature mask, which may be modified
10842 *              @stream_type: stream type
10843 *              @filter_arrangement: filter arrangement
10844 *
10845 * RETURN     : None
10846 *==========================================================================*/
10847void QCamera3HardwareInterface::setPAAFSupport(
10848        cam_feature_mask_t& feature_mask,
10849        cam_stream_type_t stream_type,
10850        cam_color_filter_arrangement_t filter_arrangement)
10851{
10852    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
10853            feature_mask, stream_type, filter_arrangement);
10854
10855    switch (filter_arrangement) {
10856    case CAM_FILTER_ARRANGEMENT_RGGB:
10857    case CAM_FILTER_ARRANGEMENT_GRBG:
10858    case CAM_FILTER_ARRANGEMENT_GBRG:
10859    case CAM_FILTER_ARRANGEMENT_BGGR:
10860        if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
10861                (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
10862                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
10863            feature_mask |= CAM_QCOM_FEATURE_PAAF;
10864        }
10865        break;
10866    case CAM_FILTER_ARRANGEMENT_Y:
10867        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
10868            feature_mask |= CAM_QCOM_FEATURE_PAAF;
10869        }
10870        break;
10871    default:
10872        break;
10873    }
10874}
10875}; //end namespace qcamera
10876