QCamera3HWI.cpp revision 6430eca00a1ce64295df92bcc8b4bd530ae71081
1/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <sync/sync.h>
46#include <gralloc_priv.h>
47#include "util/QCameraFlash.h"
48#include "QCamera3HWI.h"
49#include "QCamera3Mem.h"
50#include "QCamera3Channel.h"
51#include "QCamera3PostProc.h"
52#include "QCamera3VendorTags.h"
53
54using namespace android;
55
56namespace qcamera {
57
// Convenience accessor for a buffer pointer held inside a QCamera3Mem object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline characteristics reported to the framework (frame counts).
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum sample values for the supported raw bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions, in pixels.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream count limits advertised to the framework.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32s in a metering-region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds

// Element count of a statically-sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features HAL3 may request from the backend.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )

#define TIMEOUT_NEVER -1

// Per-sensor capability / static-metadata caches, filled once per camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Serializes global open/close/query operations across camera instances.
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
// Runtime-adjustable HAL log verbosity (read by the CDBG macros).
volatile uint32_t gCamHal3LogLevel = 1;
102
// Maps CDS setting strings (as read from system properties) to backend
// cam_cds_mode values.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
108
// Android control.effectMode enum <-> backend effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
122
// Android control.awbMode enum <-> backend white-balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
136
// Android control.sceneMode enum <-> backend scene mode translation table.
// Note: STEADYPHOTO intentionally maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
157
// Android control.afMode enum <-> backend focus mode translation table.
// AF_MODE_OFF appears twice on purpose: both backend OFF and FIXED report
// as AF_MODE_OFF to the framework (presumably the first match wins when
// translating Android -> HAL; confirm against the lookup helper).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
169
// Android colorCorrection.aberrationMode enum <-> backend CAC mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
180
// Android control.aeAntibandingMode enum <-> backend antibanding mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
189
// Android control.aeMode enum -> backend flash mode implied by that AE mode.
// AE_MODE_OFF and AE_MODE_ON both imply flash off; only the AUTO_FLASH /
// ALWAYS_FLASH / REDEYE variants enable the flash.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
199
// Android flash.mode enum <-> backend flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
207
// Android statistics.faceDetectMode enum <-> backend face-detect mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
215
// Android lens.info.focusDistanceCalibration enum <-> backend calibration
// quality translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
226
// Android lens.state enum <-> backend AF lens state translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
233
// Supported JPEG thumbnail sizes as flat (width, height) pairs. The leading
// 0x0 entry is the "no thumbnail" option required by the Android metadata
// spec for ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
241
// Android sensor.testPatternMode enum <-> backend test pattern table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
251
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// Android sensor.referenceIlluminant1 enum <-> backend AWB illuminant table.
// Several backend values (e.g. CAM_AWB_D50, CAM_AWB_A) appear more than once;
// per the note above, the first entry wins for HAL -> Android translation.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
277
// Requested frame rate (fps) <-> backend HFR (high frame rate) mode table.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
289
// camera3_device_ops vtable handed to the framework (GNU designated-initializer
// syntax). register_stream_buffers and get_metadata_vendor_tag_ops are NULL:
// both are deprecated for devices reporting HAL >= 3.2 per camera3.h, and this
// device reports CAMERA_DEVICE_API_VERSION_3_3.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
301
302/*===========================================================================
303 * FUNCTION   : QCamera3HardwareInterface
304 *
305 * DESCRIPTION: constructor of QCamera3HardwareInterface
306 *
307 * PARAMETERS :
308 *   @cameraId  : camera ID
309 *
310 * RETURN     : none
311 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Fill in the camera3_device_t handed back to the framework via openCamera().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    // Back-pointer so the static ops trampolines can recover this instance.
    mCameraDevice.priv = this;
    // NOTE: gCamCapability[cameraId] is assumed to be populated before this
    // constructor runs (presumably by the module's static-info query path).
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;
    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; they are created lazily.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview/video, default on.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);
}
393
394/*===========================================================================
395 * FUNCTION   : ~QCamera3HardwareInterface
396 *
397 * DESCRIPTION: destructor of QCamera3HardwareInterface
398 *
399 * PARAMETERS : none
400 *
401 * RETURN     : none
402 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    // Phase 1: stop every channel before any is deleted.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    }

    // Phase 2: delete the channel objects now that everything is stopped.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo (deleted in the loop above),
    // so only the cached pointer is cleared here.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // An empty CAM_INTF_META_STREAM_INFO tells the backend to tear
            // down the previously configured streams.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        // NOTE(review): informational message logged at error severity;
        // ALOGI would match the "stopping channel" log above.
        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
        mChannelHandle = 0;
    }

    if (mCameraOpened)
        closeCamera();

    // Drop all bookkeeping for requests that will never complete.
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    CDBG("%s: X", __func__);
}
519
520/*===========================================================================
521 * FUNCTION   : erasePendingRequest
522 *
523 * DESCRIPTION: function to erase a desired pending request after freeing any
524 *              allocated memory
525 *
526 * PARAMETERS :
527 *   @i       : iterator pointing to pending request to be erased
528 *
529 * RETURN     : iterator pointing to the next request
530 *==========================================================================*/
531QCamera3HardwareInterface::pendingRequestIterator
532        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
533{
534    if (i->input_buffer != NULL) {
535        free(i->input_buffer);
536        i->input_buffer = NULL;
537    }
538    if (i->settings != NULL)
539        free_camera_metadata((camera_metadata_t*)i->settings);
540    return mPendingRequestsList.erase(i);
541}
542
543/*===========================================================================
544 * FUNCTION   : camEvtHandle
545 *
546 * DESCRIPTION: Function registered to mm-camera-interface to handle events
547 *
548 * PARAMETERS :
549 *   @camera_handle : interface layer camera handle
550 *   @evt           : ptr to event
551 *   @user_data     : user data ptr
552 *
553 * RETURN     : none
554 *==========================================================================*/
555void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
556                                          mm_camera_event_t *evt,
557                                          void *user_data)
558{
559    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
560    if (obj && evt) {
561        switch(evt->server_event_type) {
562            case CAM_EVENT_TYPE_DAEMON_DIED:
563                ALOGE("%s: Fatal, camera daemon died", __func__);
564                //close the camera backend
565                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
566                        && obj->mCameraHandle->ops) {
567                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
568                } else {
569                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
570                            __func__);
571                }
572                camera3_notify_msg_t notify_msg;
573                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
574                notify_msg.type = CAMERA3_MSG_ERROR;
575                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
576                notify_msg.message.error.error_stream = NULL;
577                notify_msg.message.error.frame_number = 0;
578                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
579                break;
580
581            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
582                CDBG("%s: HAL got request pull from Daemon", __func__);
583                pthread_mutex_lock(&obj->mMutex);
584                obj->mWokenUpByDaemon = true;
585                obj->unblockRequestIfNecessary();
586                pthread_mutex_unlock(&obj->mMutex);
587                break;
588
589            default:
590                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
591                        evt->server_event_type);
592                break;
593        }
594    } else {
595        ALOGE("%s: NULL user_data/evt", __func__);
596    }
597}
598
599/*===========================================================================
600 * FUNCTION   : openCamera
601 *
602 * DESCRIPTION: open camera
603 *
604 * PARAMETERS :
605 *   @hw_device  : double ptr for camera device struct
606 *
607 * RETURN     : int32_t type of status
608 *              NO_ERROR  -- success
609 *              none-zero failure code
610 *==========================================================================*/
611int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
612{
613    int rc = 0;
614    if (mCameraOpened) {
615        *hw_device = NULL;
616        return PERMISSION_DENIED;
617    }
618    m_perfLock.lock_acq();
619    rc = openCamera();
620    if (rc == 0) {
621        *hw_device = &mCameraDevice.common;
622    } else
623        *hw_device = NULL;
624
625    m_perfLock.lock_rel();
626    return rc;
627}
628
629/*===========================================================================
630 * FUNCTION   : openCamera
631 *
632 * DESCRIPTION: open camera
633 *
634 * PARAMETERS : none
635 *
636 * RETURN     : int32_t type of status
637 *              NO_ERROR  -- success
638 *              none-zero failure code
639 *==========================================================================*/
640int QCamera3HardwareInterface::openCamera()
641{
642    int rc = 0;
643
644    ATRACE_CALL();
645    if (mCameraHandle) {
646        ALOGE("Failure: Camera already opened");
647        return ALREADY_EXISTS;
648    }
649
650    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
651    if (rc < 0) {
652        ALOGE("%s: Failed to reserve flash for camera id: %d",
653                __func__,
654                mCameraId);
655        return UNKNOWN_ERROR;
656    }
657
658    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
659    if (rc) {
660        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
661        return rc;
662    }
663
664    mCameraOpened = true;
665
666    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
667            camEvtHandle, (void *)this);
668
669    if (rc < 0) {
670        ALOGE("%s: Error, failed to register event callback", __func__);
671        /* Not closing camera here since it is already handled in destructor */
672        return FAILED_TRANSACTION;
673    }
674    mFirstConfiguration = true;
675    return NO_ERROR;
676}
677
678/*===========================================================================
679 * FUNCTION   : closeCamera
680 *
681 * DESCRIPTION: close camera
682 *
683 * PARAMETERS : none
684 *
685 * RETURN     : int32_t type of status
686 *              NO_ERROR  -- success
687 *              none-zero failure code
688 *==========================================================================*/
689int QCamera3HardwareInterface::closeCamera()
690{
691    ATRACE_CALL();
692    int rc = NO_ERROR;
693
694    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
695    mCameraHandle = NULL;
696    mCameraOpened = false;
697
698    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
699        CDBG("%s: Failed to release flash for camera id: %d",
700                __func__,
701                mCameraId);
702    }
703
704    return rc;
705}
706
707/*===========================================================================
708 * FUNCTION   : initialize
709 *
710 * DESCRIPTION: Initialize frameworks callback functions
711 *
712 * PARAMETERS :
713 *   @callback_ops : callback function to frameworks
714 *
715 * RETURN     :
716 *
717 *==========================================================================*/
718int QCamera3HardwareInterface::initialize(
719        const struct camera3_callback_ops *callback_ops)
720{
721    ATRACE_CALL();
722    int rc;
723
724    pthread_mutex_lock(&mMutex);
725
726    rc = initParameters();
727    if (rc < 0) {
728        ALOGE("%s: initParamters failed %d", __func__, rc);
729       goto err1;
730    }
731    mCallbackOps = callback_ops;
732
733    mChannelHandle = mCameraHandle->ops->add_channel(
734            mCameraHandle->camera_handle, NULL, NULL, this);
735    if (mChannelHandle == 0) {
736        ALOGE("%s: add_channel failed", __func__);
737        rc = -ENOMEM;
738        pthread_mutex_unlock(&mMutex);
739        return rc;
740    }
741
742    pthread_mutex_unlock(&mMutex);
743    mCameraInitialized = true;
744    return 0;
745
746err1:
747    pthread_mutex_unlock(&mMutex);
748    return rc;
749}
750
751/*===========================================================================
752 * FUNCTION   : validateStreamDimensions
753 *
754 * DESCRIPTION: Check if the configuration requested are those advertised
755 *
756 * PARAMETERS :
757 *   @stream_list : streams to be configured
758 *
759 * RETURN     :
760 *
761 *==========================================================================*/
762int QCamera3HardwareInterface::validateStreamDimensions(
763        camera3_stream_configuration_t *streamList)
764{
765    int rc = NO_ERROR;
766    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
767    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
768    size_t count = 0;
769
770    camera3_stream_t *inputStream = NULL;
771    /*
772    * Loop through all streams to find input stream if it exists*
773    */
774    for (size_t i = 0; i< streamList->num_streams; i++) {
775        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
776            if (inputStream != NULL) {
777                ALOGE("%s: Error, Multiple input streams requested");
778                return -EINVAL;
779            }
780            inputStream = streamList->streams[i];
781        }
782    }
783    /*
784    * Loop through all streams requested in configuration
785    * Check if unsupported sizes have been requested on any of them
786    */
787    for (size_t j = 0; j < streamList->num_streams; j++) {
788        bool sizeFound = false;
789        size_t jpeg_sizes_cnt = 0;
790        camera3_stream_t *newStream = streamList->streams[j];
791
792        uint32_t rotatedHeight = newStream->height;
793        uint32_t rotatedWidth = newStream->width;
794        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
795                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
796            rotatedHeight = newStream->width;
797            rotatedWidth = newStream->height;
798        }
799
800        /*
801        * Sizes are different for each type of stream format check against
802        * appropriate table.
803        */
804        switch (newStream->format) {
805        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
806        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
807        case HAL_PIXEL_FORMAT_RAW10:
808            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
809            for (size_t i = 0; i < count; i++) {
810                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
811                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
812                    sizeFound = true;
813                    break;
814                }
815            }
816            break;
817        case HAL_PIXEL_FORMAT_BLOB:
818            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
819            /* Generate JPEG sizes table */
820            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
821                    count,
822                    MAX_SIZES_CNT,
823                    available_processed_sizes);
824            jpeg_sizes_cnt = filterJpegSizes(
825                    available_jpeg_sizes,
826                    available_processed_sizes,
827                    count * 2,
828                    MAX_SIZES_CNT * 2,
829                    gCamCapability[mCameraId]->active_array_size,
830                    gCamCapability[mCameraId]->max_downscale_factor);
831
832            /* Verify set size against generated sizes table */
833            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
834                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
835                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
836                    sizeFound = true;
837                    break;
838                }
839            }
840            break;
841        case HAL_PIXEL_FORMAT_YCbCr_420_888:
842        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
843        default:
844            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
845                    || newStream->stream_type == CAMERA3_STREAM_INPUT
846                    || IS_USAGE_ZSL(newStream->usage)) {
847                if (((int32_t)rotatedWidth ==
848                                gCamCapability[mCameraId]->active_array_size.width) &&
849                                ((int32_t)rotatedHeight ==
850                                gCamCapability[mCameraId]->active_array_size.height)) {
851                    sizeFound = true;
852                    break;
853                }
854                /* We could potentially break here to enforce ZSL stream
855                 * set from frameworks always is full active array size
856                 * but it is not clear from the spc if framework will always
857                 * follow that, also we have logic to override to full array
858                 * size, so keeping the logic lenient at the moment
859                 */
860            }
861            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
862                    MAX_SIZES_CNT);
863            for (size_t i = 0; i < count; i++) {
864                if (((int32_t)rotatedWidth ==
865                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
866                            ((int32_t)rotatedHeight ==
867                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
868                    sizeFound = true;
869                    break;
870                }
871            }
872            break;
873        } /* End of switch(newStream->format) */
874
875        /* We error out even if a single stream has unsupported size set */
876        if (!sizeFound) {
877            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
878                  "type:%d", __func__, rotatedWidth, rotatedHeight,
879                  newStream->format);
880            ALOGE("%s: Active array size is  %d x %d", __func__,
881                    gCamCapability[mCameraId]->active_array_size.width,
882                    gCamCapability[mCameraId]->active_array_size.height);
883            rc = -EINVAL;
884            break;
885        }
886    } /* End of for each stream */
887    return rc;
888}
889
890/*==============================================================================
891 * FUNCTION   : isSupportChannelNeeded
892 *
893 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
894 *
895 * PARAMETERS :
896 *   @stream_list : streams to be configured
897 *   @stream_config_info : the config info for streams to be configured
898 *
899 * RETURN     : Boolen true/false decision
900 *
901 *==========================================================================*/
902bool QCamera3HardwareInterface::isSupportChannelNeeded(
903        camera3_stream_configuration_t *streamList,
904        cam_stream_size_info_t stream_config_info)
905{
906    uint32_t i;
907    bool pprocRequested = false;
908    /* Check for conditions where PProc pipeline does not have any streams*/
909    for (i = 0; i < stream_config_info.num_streams; i++) {
910        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
911                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
912            pprocRequested = true;
913            break;
914        }
915    }
916
917    if (pprocRequested == false )
918        return true;
919
920    /* Dummy stream needed if only raw or jpeg streams present */
921    for (i = 0; i < streamList->num_streams; i++) {
922        switch(streamList->streams[i]->format) {
923            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
924            case HAL_PIXEL_FORMAT_RAW10:
925            case HAL_PIXEL_FORMAT_RAW16:
926            case HAL_PIXEL_FORMAT_BLOB:
927                break;
928            default:
929                return false;
930        }
931    }
932    return true;
933}
934
935/*==============================================================================
936 * FUNCTION   : getSensorOutputSize
937 *
938 * DESCRIPTION: Get sensor output size based on current stream configuratoin
939 *
940 * PARAMETERS :
941 *   @sensor_dim : sensor output dimension (output)
942 *
943 * RETURN     : int32_t type of status
944 *              NO_ERROR  -- success
945 *              none-zero failure code
946 *
947 *==========================================================================*/
948int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
949{
950    int32_t rc = NO_ERROR;
951
952    cam_dimension_t max_dim = {0, 0};
953    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
954        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
955            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
956        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
957            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
958    }
959
960    clear_metadata_buffer(mParameters);
961
962    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
963            max_dim);
964    if (rc != NO_ERROR) {
965        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
966        return rc;
967    }
968
969    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
970    if (rc != NO_ERROR) {
971        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
972        return rc;
973    }
974
975    clear_metadata_buffer(mParameters);
976    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
977
978    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
979            mParameters);
980    if (rc != NO_ERROR) {
981        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
982        return rc;
983    }
984
985    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
986    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
987
988    return rc;
989}
990
991/*==============================================================================
992 * FUNCTION   : enablePowerHint
993 *
994 * DESCRIPTION: enable single powerhint for preview and different video modes.
995 *
996 * PARAMETERS :
997 *
998 * RETURN     : NULL
999 *
1000 *==========================================================================*/
1001void QCamera3HardwareInterface::enablePowerHint()
1002{
1003    if (!mPowerHintEnabled) {
1004        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1005        mPowerHintEnabled = true;
1006    }
1007}
1008
1009/*==============================================================================
1010 * FUNCTION   : disablePowerHint
1011 *
1012 * DESCRIPTION: disable current powerhint.
1013 *
1014 * PARAMETERS :
1015 *
1016 * RETURN     : NULL
1017 *
1018 *==========================================================================*/
1019void QCamera3HardwareInterface::disablePowerHint()
1020{
1021    if (mPowerHintEnabled) {
1022        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1023        mPowerHintEnabled = false;
1024    }
1025}
1026
1027/*===========================================================================
1028 * FUNCTION   : configureStreams
1029 *
1030 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1031 *              and output streams.
1032 *
1033 * PARAMETERS :
1034 *   @stream_list : streams to be configured
1035 *
1036 * RETURN     :
1037 *
1038 *==========================================================================*/
1039int QCamera3HardwareInterface::configureStreams(
1040        camera3_stream_configuration_t *streamList)
1041{
1042    ATRACE_CALL();
1043    int rc = 0;
1044
1045    // Acquire perfLock before configure streams
1046    m_perfLock.lock_acq();
1047    rc = configureStreamsPerfLocked(streamList);
1048    m_perfLock.lock_rel();
1049
1050    return rc;
1051}
1052
1053/*===========================================================================
1054 * FUNCTION   : configureStreamsPerfLocked
1055 *
1056 * DESCRIPTION: configureStreams while perfLock is held.
1057 *
1058 * PARAMETERS :
1059 *   @stream_list : streams to be configured
1060 *
1061 * RETURN     : int32_t type of status
1062 *              NO_ERROR  -- success
1063 *              none-zero failure code
1064 *==========================================================================*/
1065int QCamera3HardwareInterface::configureStreamsPerfLocked(
1066        camera3_stream_configuration_t *streamList)
1067{
1068    ATRACE_CALL();
1069    int rc = 0;
1070
1071    // Sanity check stream_list
1072    if (streamList == NULL) {
1073        ALOGE("%s: NULL stream configuration", __func__);
1074        return BAD_VALUE;
1075    }
1076    if (streamList->streams == NULL) {
1077        ALOGE("%s: NULL stream list", __func__);
1078        return BAD_VALUE;
1079    }
1080
1081    if (streamList->num_streams < 1) {
1082        ALOGE("%s: Bad number of streams requested: %d", __func__,
1083                streamList->num_streams);
1084        return BAD_VALUE;
1085    }
1086
1087    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1088        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1089                MAX_NUM_STREAMS, streamList->num_streams);
1090        return BAD_VALUE;
1091    }
1092
1093    mOpMode = streamList->operation_mode;
1094    CDBG("%s: mOpMode: %d", __func__, mOpMode);
1095
1096    /* first invalidate all the steams in the mStreamList
1097     * if they appear again, they will be validated */
1098    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1099            it != mStreamInfo.end(); it++) {
1100        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1101        channel->stop();
1102        (*it)->status = INVALID;
1103    }
1104
1105    if (mRawDumpChannel) {
1106        mRawDumpChannel->stop();
1107        delete mRawDumpChannel;
1108        mRawDumpChannel = NULL;
1109    }
1110
1111    if (mSupportChannel)
1112        mSupportChannel->stop();
1113
1114    if (mAnalysisChannel) {
1115        mAnalysisChannel->stop();
1116    }
1117    if (mMetadataChannel) {
1118        /* If content of mStreamInfo is not 0, there is metadata stream */
1119        mMetadataChannel->stop();
1120    }
1121    if (mChannelHandle) {
1122        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1123                mChannelHandle);
1124        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1125    }
1126
1127    pthread_mutex_lock(&mMutex);
1128
1129    /* Check whether we have video stream */
1130    m_bIs4KVideo = false;
1131    m_bIsVideo = false;
1132    m_bEisSupportedSize = false;
1133    m_bTnrEnabled = false;
1134    bool isZsl = false;
1135    uint32_t videoWidth = 0U;
1136    uint32_t videoHeight = 0U;
1137    size_t rawStreamCnt = 0;
1138    size_t stallStreamCnt = 0;
1139    size_t processedStreamCnt = 0;
1140    // Number of streams on ISP encoder path
1141    size_t numStreamsOnEncoder = 0;
1142    size_t numYuv888OnEncoder = 0;
1143    bool bYuv888OverrideJpeg = false;
1144    cam_dimension_t largeYuv888Size = {0, 0};
1145    cam_dimension_t maxViewfinderSize = {0, 0};
1146    bool bJpegExceeds4K = false;
1147    bool bUseCommonFeatureMask = false;
1148    uint32_t commonFeatureMask = 0;
1149    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1150    camera3_stream_t *inputStream = NULL;
1151    bool isJpeg = false;
1152    cam_dimension_t jpegSize = {0, 0};
1153
1154    /*EIS configuration*/
1155    bool eisSupported = false;
1156    bool oisSupported = false;
1157    int32_t margin_index = -1;
1158    uint8_t eis_prop_set;
1159    uint32_t maxEisWidth = 0;
1160    uint32_t maxEisHeight = 0;
1161    int32_t hal_version = CAM_HAL_V3;
1162
1163    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1164
1165    size_t count = IS_TYPE_MAX;
1166    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1167    for (size_t i = 0; i < count; i++) {
1168        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1169            eisSupported = true;
1170            margin_index = (int32_t)i;
1171            break;
1172        }
1173    }
1174
1175    count = CAM_OPT_STAB_MAX;
1176    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1177    for (size_t i = 0; i < count; i++) {
1178        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1179            oisSupported = true;
1180            break;
1181        }
1182    }
1183
1184    if (eisSupported) {
1185        maxEisWidth = MAX_EIS_WIDTH;
1186        maxEisHeight = MAX_EIS_HEIGHT;
1187    }
1188
1189    /* EIS setprop control */
1190    char eis_prop[PROPERTY_VALUE_MAX];
1191    memset(eis_prop, 0, sizeof(eis_prop));
1192    property_get("persist.camera.eis.enable", eis_prop, "0");
1193    eis_prop_set = (uint8_t)atoi(eis_prop);
1194
1195    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1196            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1197
1198    /* stream configurations */
1199    for (size_t i = 0; i < streamList->num_streams; i++) {
1200        camera3_stream_t *newStream = streamList->streams[i];
1201        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1202                "height = %d, rotation = %d, usage = 0x%x",
1203                __func__, i, newStream->stream_type, newStream->format,
1204                newStream->width, newStream->height, newStream->rotation,
1205                newStream->usage);
1206        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1207                newStream->stream_type == CAMERA3_STREAM_INPUT){
1208            isZsl = true;
1209        }
1210        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1211            inputStream = newStream;
1212        }
1213
1214        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1215            isJpeg = true;
1216            jpegSize.width = newStream->width;
1217            jpegSize.height = newStream->height;
1218            if (newStream->width > VIDEO_4K_WIDTH ||
1219                    newStream->height > VIDEO_4K_HEIGHT)
1220                bJpegExceeds4K = true;
1221        }
1222
1223        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1224                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1225            m_bIsVideo = true;
1226            videoWidth = newStream->width;
1227            videoHeight = newStream->height;
1228            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1229                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1230                m_bIs4KVideo = true;
1231            }
1232            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1233                                  (newStream->height <= maxEisHeight);
1234        }
1235        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1236                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1237            switch (newStream->format) {
1238            case HAL_PIXEL_FORMAT_BLOB:
1239                stallStreamCnt++;
1240                if (isOnEncoder(maxViewfinderSize, newStream->width,
1241                        newStream->height)) {
1242                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1243                    numStreamsOnEncoder++;
1244                }
1245                break;
1246            case HAL_PIXEL_FORMAT_RAW10:
1247            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1248            case HAL_PIXEL_FORMAT_RAW16:
1249                rawStreamCnt++;
1250                break;
1251            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1252                processedStreamCnt++;
1253                if (isOnEncoder(maxViewfinderSize, newStream->width,
1254                        newStream->height)) {
1255                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1256                            IS_USAGE_ZSL(newStream->usage)) {
1257                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1258                    } else {
1259                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1260                    }
1261                    numStreamsOnEncoder++;
1262                }
1263                break;
1264            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1265                processedStreamCnt++;
1266                if (isOnEncoder(maxViewfinderSize, newStream->width,
1267                        newStream->height)) {
1268                    // If Yuv888 size is not greater than 4K, set feature mask
1269                    // to SUPERSET so that it support concurrent request on
1270                    // YUV and JPEG.
1271                    if (newStream->width <= VIDEO_4K_WIDTH &&
1272                            newStream->height <= VIDEO_4K_HEIGHT) {
1273                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1274                    } else {
1275                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1276                    }
1277                    numStreamsOnEncoder++;
1278                    numYuv888OnEncoder++;
1279                    largeYuv888Size.width = newStream->width;
1280                    largeYuv888Size.height = newStream->height;
1281                }
1282                break;
1283            default:
1284                processedStreamCnt++;
1285                if (isOnEncoder(maxViewfinderSize, newStream->width,
1286                        newStream->height)) {
1287                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1288                    numStreamsOnEncoder++;
1289                }
1290                break;
1291            }
1292
1293        }
1294    }
1295
1296    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1297        !m_bIsVideo) {
1298        m_bEisEnable = false;
1299    }
1300
1301    /* Logic to enable/disable TNR based on specific config size/etc.*/
1302    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1303            ((videoWidth == 1920 && videoHeight == 1080) ||
1304            (videoWidth == 1280 && videoHeight == 720)) &&
1305            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1306        m_bTnrEnabled = true;
1307
1308    /* Check if num_streams is sane */
1309    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1310            rawStreamCnt > MAX_RAW_STREAMS ||
1311            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1312        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1313                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1314        pthread_mutex_unlock(&mMutex);
1315        return -EINVAL;
1316    }
1317    /* Check whether we have zsl stream or 4k video case */
1318    if (isZsl && m_bIsVideo) {
1319        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1320        pthread_mutex_unlock(&mMutex);
1321        return -EINVAL;
1322    }
1323    /* Check if stream sizes are sane */
1324    if (numStreamsOnEncoder > 2) {
1325        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1326                __func__);
1327        pthread_mutex_unlock(&mMutex);
1328        return -EINVAL;
1329    } else if (1 < numStreamsOnEncoder){
1330        bUseCommonFeatureMask = true;
1331        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1332                __func__);
1333    }
1334
1335    /* Check if BLOB size is greater than 4k in 4k recording case */
1336    if (m_bIs4KVideo && bJpegExceeds4K) {
1337        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1338                __func__);
1339        pthread_mutex_unlock(&mMutex);
1340        return -EINVAL;
1341    }
1342
1343    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1344    // the YUV stream's size is greater or equal to the JPEG size, set common
1345    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1346    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1347            jpegSize.width, jpegSize.height) &&
1348            largeYuv888Size.width > jpegSize.width &&
1349            largeYuv888Size.height > jpegSize.height) {
1350        bYuv888OverrideJpeg = true;
1351    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1352        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1353    }
1354
1355    rc = validateStreamDimensions(streamList);
1356    if (rc == NO_ERROR) {
1357        rc = validateStreamRotations(streamList);
1358    }
1359    if (rc != NO_ERROR) {
1360        ALOGE("%s: Invalid stream configuration requested!", __func__);
1361        pthread_mutex_unlock(&mMutex);
1362        return rc;
1363    }
1364
1365    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1366    camera3_stream_t *jpegStream = NULL;
1367    for (size_t i = 0; i < streamList->num_streams; i++) {
1368        camera3_stream_t *newStream = streamList->streams[i];
1369        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1370                "stream size : %d x %d, stream rotation = %d",
1371                __func__, newStream->stream_type, newStream->format,
1372                newStream->width, newStream->height, newStream->rotation);
1373        //if the stream is in the mStreamList validate it
1374        bool stream_exists = false;
1375        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1376                it != mStreamInfo.end(); it++) {
1377            if ((*it)->stream == newStream) {
1378                QCamera3ProcessingChannel *channel =
1379                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1380                stream_exists = true;
1381                if (channel)
1382                    delete channel;
1383                (*it)->status = VALID;
1384                (*it)->stream->priv = NULL;
1385                (*it)->channel = NULL;
1386            }
1387        }
1388        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1389            //new stream
1390            stream_info_t* stream_info;
1391            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1392            if (!stream_info) {
1393               ALOGE("%s: Could not allocate stream info", __func__);
1394               rc = -ENOMEM;
1395               pthread_mutex_unlock(&mMutex);
1396               return rc;
1397            }
1398            stream_info->stream = newStream;
1399            stream_info->status = VALID;
1400            stream_info->channel = NULL;
1401            mStreamInfo.push_back(stream_info);
1402        }
1403        /* Covers Opaque ZSL and API1 F/W ZSL */
1404        if (IS_USAGE_ZSL(newStream->usage)
1405                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1406            if (zslStream != NULL) {
1407                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1408                pthread_mutex_unlock(&mMutex);
1409                return BAD_VALUE;
1410            }
1411            zslStream = newStream;
1412        }
1413        /* Covers YUV reprocess */
1414        if (inputStream != NULL) {
1415            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1416                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1417                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1418                    && inputStream->width == newStream->width
1419                    && inputStream->height == newStream->height) {
1420                if (zslStream != NULL) {
1421                    /* This scenario indicates multiple YUV streams with same size
1422                     * as input stream have been requested, since zsl stream handle
1423                     * is solely use for the purpose of overriding the size of streams
1424                     * which share h/w streams we will just make a guess here as to
1425                     * which of the stream is a ZSL stream, this will be refactored
1426                     * once we make generic logic for streams sharing encoder output
1427                     */
1428                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1429                }
1430                zslStream = newStream;
1431            }
1432        }
1433        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1434            jpegStream = newStream;
1435        }
1436    }
1437
1438    /* If a zsl stream is set, we know that we have configured at least one input or
1439       bidirectional stream */
1440    if (NULL != zslStream) {
1441        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1442        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1443        mInputStreamInfo.format = zslStream->format;
1444        mInputStreamInfo.usage = zslStream->usage;
1445        CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1446                __func__, mInputStreamInfo.dim.width,
1447                mInputStreamInfo.dim.height,
1448                mInputStreamInfo.format, mInputStreamInfo.usage);
1449    }
1450
1451    cleanAndSortStreamInfo();
1452    if (mMetadataChannel) {
1453        delete mMetadataChannel;
1454        mMetadataChannel = NULL;
1455    }
1456    if (mSupportChannel) {
1457        delete mSupportChannel;
1458        mSupportChannel = NULL;
1459    }
1460
1461    if (mAnalysisChannel) {
1462        delete mAnalysisChannel;
1463        mAnalysisChannel = NULL;
1464    }
1465
1466    if (mDummyBatchChannel) {
1467        delete mDummyBatchChannel;
1468        mDummyBatchChannel = NULL;
1469    }
1470
1471    //Create metadata channel and initialize it
1472    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1473                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1474                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1475    if (mMetadataChannel == NULL) {
1476        ALOGE("%s: failed to allocate metadata channel", __func__);
1477        rc = -ENOMEM;
1478        pthread_mutex_unlock(&mMutex);
1479        return rc;
1480    }
1481    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1482    if (rc < 0) {
1483        ALOGE("%s: metadata channel initialization failed", __func__);
1484        delete mMetadataChannel;
1485        mMetadataChannel = NULL;
1486        pthread_mutex_unlock(&mMutex);
1487        return rc;
1488    }
1489
1490    // Create analysis stream all the time, even when h/w support is not available
1491    {
1492        mAnalysisChannel = new QCamera3SupportChannel(
1493                mCameraHandle->camera_handle,
1494                mChannelHandle,
1495                mCameraHandle->ops,
1496                &gCamCapability[mCameraId]->padding_info,
1497                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1498                CAM_STREAM_TYPE_ANALYSIS,
1499                &gCamCapability[mCameraId]->analysis_recommended_res,
1500                gCamCapability[mCameraId]->analysis_recommended_format,
1501                this,
1502                0); // force buffer count to 0
1503        if (!mAnalysisChannel) {
1504            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1505            pthread_mutex_unlock(&mMutex);
1506            return -ENOMEM;
1507        }
1508    }
1509
1510    bool isRawStreamRequested = false;
1511    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1512    /* Allocate channel objects for the requested streams */
1513    for (size_t i = 0; i < streamList->num_streams; i++) {
1514        camera3_stream_t *newStream = streamList->streams[i];
1515        uint32_t stream_usage = newStream->usage;
1516        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1517        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1518        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1519                || IS_USAGE_ZSL(newStream->usage)) &&
1520            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1521            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1522            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1523        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1524                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1525        } else {
1526            //for non zsl streams find out the format
1527            switch (newStream->format) {
1528            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1529              {
1530                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1531                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1532
1533                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1534
1535                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1536                     if (m_bTnrEnabled && m_bTnrVideo) {
1537                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1538                             CAM_QCOM_FEATURE_CPP_TNR;
1539                     }
1540
1541                 } else {
1542
1543                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1544                     if (m_bTnrEnabled && m_bTnrPreview) {
1545                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1546                             CAM_QCOM_FEATURE_CPP_TNR;
1547                     }
1548                 }
1549
1550                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1551                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1552                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1553                             newStream->height;
1554                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1555                             newStream->width;
1556                 }
1557              }
1558              break;
1559           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1560              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1561              if (isOnEncoder(maxViewfinderSize, newStream->width,
1562                      newStream->height)) {
1563                  if (bUseCommonFeatureMask)
1564                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1565                              commonFeatureMask;
1566                  else
1567                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1568                              CAM_QCOM_FEATURE_NONE;
1569              } else {
1570                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1571                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1572              }
1573              break;
1574           case HAL_PIXEL_FORMAT_BLOB:
1575              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1576              if (m_bIs4KVideo && !isZsl) {
1577                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1578                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1579              } else {
1580                  if (bUseCommonFeatureMask &&
1581                          isOnEncoder(maxViewfinderSize, newStream->width,
1582                                  newStream->height)) {
1583                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1584                  } else {
1585                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1586                  }
1587              }
1588              if (isZsl) {
1589                  if (zslStream) {
1590                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1591                              (int32_t)zslStream->width;
1592                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1593                              (int32_t)zslStream->height;
1594                  } else {
1595                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1596                      pthread_mutex_unlock(&mMutex);
1597                      return -EINVAL;
1598                  }
1599              } else if (m_bIs4KVideo) {
1600                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1601                          (int32_t)videoWidth;
1602                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1603                          (int32_t)videoHeight;
1604              } else if (bYuv888OverrideJpeg) {
1605                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1606                          (int32_t)largeYuv888Size.width;
1607                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1608                          (int32_t)largeYuv888Size.height;
1609              }
1610              break;
1611           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1612           case HAL_PIXEL_FORMAT_RAW16:
1613           case HAL_PIXEL_FORMAT_RAW10:
1614              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1615              isRawStreamRequested = true;
1616              break;
1617           default:
1618              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1619              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1620              break;
1621            }
1622
1623        }
1624
1625        if (newStream->priv == NULL) {
1626            //New stream, construct channel
1627            switch (newStream->stream_type) {
1628            case CAMERA3_STREAM_INPUT:
1629                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1630                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1631                break;
1632            case CAMERA3_STREAM_BIDIRECTIONAL:
1633                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1634                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1635                break;
1636            case CAMERA3_STREAM_OUTPUT:
1637                /* For video encoding stream, set read/write rarely
1638                 * flag so that they may be set to un-cached */
1639                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1640                    newStream->usage |=
1641                         (GRALLOC_USAGE_SW_READ_RARELY |
1642                         GRALLOC_USAGE_SW_WRITE_RARELY |
1643                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1644                else if (IS_USAGE_ZSL(newStream->usage))
1645                    CDBG("%s: ZSL usage flag skipping", __func__);
1646                else if (newStream == zslStream
1647                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1648                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1649                } else
1650                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1651                break;
1652            default:
1653                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1654                break;
1655            }
1656
1657            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1658                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1659                QCamera3ProcessingChannel *channel = NULL;
1660                switch (newStream->format) {
1661                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1662                    if ((newStream->usage &
1663                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1664                            (streamList->operation_mode ==
1665                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1666                    ) {
1667                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1668                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1669                                &gCamCapability[mCameraId]->padding_info,
1670                                this,
1671                                newStream,
1672                                (cam_stream_type_t)
1673                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1674                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1675                                mMetadataChannel,
1676                                0); //heap buffers are not required for HFR video channel
1677                        if (channel == NULL) {
1678                            ALOGE("%s: allocation of channel failed", __func__);
1679                            pthread_mutex_unlock(&mMutex);
1680                            return -ENOMEM;
1681                        }
                        //channel->getNumBuffers() will return 0 here so use
                        //MAX_INFLIGHT_HFR_REQUESTS
1684                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1685                        newStream->priv = channel;
1686                        ALOGI("%s: num video buffers in HFR mode: %d",
1687                                __func__, MAX_INFLIGHT_HFR_REQUESTS);
1688                    } else {
1689                        /* Copy stream contents in HFR preview only case to create
1690                         * dummy batch channel so that sensor streaming is in
1691                         * HFR mode */
1692                        if (!m_bIsVideo && (streamList->operation_mode ==
1693                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1694                            mDummyBatchStream = *newStream;
1695                        }
1696                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1697                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1698                                &gCamCapability[mCameraId]->padding_info,
1699                                this,
1700                                newStream,
1701                                (cam_stream_type_t)
1702                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1703                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1704                                mMetadataChannel,
1705                                MAX_INFLIGHT_REQUESTS);
1706                        if (channel == NULL) {
1707                            ALOGE("%s: allocation of channel failed", __func__);
1708                            pthread_mutex_unlock(&mMutex);
1709                            return -ENOMEM;
1710                        }
1711                        newStream->max_buffers = channel->getNumBuffers();
1712                        newStream->priv = channel;
1713                    }
1714                    break;
1715                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1716                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1717                            mChannelHandle,
1718                            mCameraHandle->ops, captureResultCb,
1719                            &gCamCapability[mCameraId]->padding_info,
1720                            this,
1721                            newStream,
1722                            (cam_stream_type_t)
1723                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1724                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1725                            mMetadataChannel);
1726                    if (channel == NULL) {
1727                        ALOGE("%s: allocation of YUV channel failed", __func__);
1728                        pthread_mutex_unlock(&mMutex);
1729                        return -ENOMEM;
1730                    }
1731                    newStream->max_buffers = channel->getNumBuffers();
1732                    newStream->priv = channel;
1733                    break;
1734                }
1735                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1736                case HAL_PIXEL_FORMAT_RAW16:
1737                case HAL_PIXEL_FORMAT_RAW10:
1738                    mRawChannel = new QCamera3RawChannel(
1739                            mCameraHandle->camera_handle, mChannelHandle,
1740                            mCameraHandle->ops, captureResultCb,
1741                            &gCamCapability[mCameraId]->padding_info,
1742                            this, newStream, CAM_QCOM_FEATURE_NONE,
1743                            mMetadataChannel,
1744                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1745                    if (mRawChannel == NULL) {
1746                        ALOGE("%s: allocation of raw channel failed", __func__);
1747                        pthread_mutex_unlock(&mMutex);
1748                        return -ENOMEM;
1749                    }
1750                    newStream->max_buffers = mRawChannel->getNumBuffers();
1751                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1752                    break;
1753                case HAL_PIXEL_FORMAT_BLOB:
1754                    // Max live snapshot inflight buffer is 1. This is to mitigate
1755                    // frame drop issues for video snapshot. The more buffers being
1756                    // allocated, the more frame drops there are.
1757                    mPictureChannel = new QCamera3PicChannel(
1758                            mCameraHandle->camera_handle, mChannelHandle,
1759                            mCameraHandle->ops, captureResultCb,
1760                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1761                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1762                            m_bIs4KVideo, isZsl, mMetadataChannel,
1763                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1764                    if (mPictureChannel == NULL) {
1765                        ALOGE("%s: allocation of channel failed", __func__);
1766                        pthread_mutex_unlock(&mMutex);
1767                        return -ENOMEM;
1768                    }
1769                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1770                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1771                    mPictureChannel->overrideYuvSize(
1772                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1773                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1774                    break;
1775
1776                default:
1777                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1778                    break;
1779                }
1780            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1781                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1782            } else {
1783                ALOGE("%s: Error, Unknown stream type", __func__);
1784                return -EINVAL;
1785            }
1786
1787            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1788                    it != mStreamInfo.end(); it++) {
1789                if ((*it)->stream == newStream) {
1790                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1791                    break;
1792                }
1793            }
1794        } else {
1795            // Channel already exists for this stream
1796            // Do nothing for now
1797        }
1798
        /* Do not add entries for input stream in metastream info
         * since there is no real stream associated with it
         */
1802        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1803            mStreamConfigInfo.num_streams++;
1804    }
1805
1806    //RAW DUMP channel
1807    if (mEnableRawDump && isRawStreamRequested == false){
1808        cam_dimension_t rawDumpSize;
1809        rawDumpSize = getMaxRawSize(mCameraId);
1810        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1811                                  mChannelHandle,
1812                                  mCameraHandle->ops,
1813                                  rawDumpSize,
1814                                  &gCamCapability[mCameraId]->padding_info,
1815                                  this, CAM_QCOM_FEATURE_NONE);
1816        if (!mRawDumpChannel) {
1817            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1818            pthread_mutex_unlock(&mMutex);
1819            return -ENOMEM;
1820        }
1821    }
1822
1823
1824    if (mAnalysisChannel) {
1825        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1826                gCamCapability[mCameraId]->analysis_recommended_res;
1827        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1828                CAM_STREAM_TYPE_ANALYSIS;
1829        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1830                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1831        mStreamConfigInfo.num_streams++;
1832    }
1833
1834    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1835        mSupportChannel = new QCamera3SupportChannel(
1836                mCameraHandle->camera_handle,
1837                mChannelHandle,
1838                mCameraHandle->ops,
1839                &gCamCapability[mCameraId]->padding_info,
1840                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1841                CAM_STREAM_TYPE_CALLBACK,
1842                &QCamera3SupportChannel::kDim,
1843                CAM_FORMAT_YUV_420_NV21,
1844                this);
1845        if (!mSupportChannel) {
1846            ALOGE("%s: dummy channel cannot be created", __func__);
1847            pthread_mutex_unlock(&mMutex);
1848            return -ENOMEM;
1849        }
1850    }
1851
1852    if (mSupportChannel) {
1853        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1854                QCamera3SupportChannel::kDim;
1855        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1856                CAM_STREAM_TYPE_CALLBACK;
1857        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1858                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1859        mStreamConfigInfo.num_streams++;
1860    }
1861
1862    if (mRawDumpChannel) {
1863        cam_dimension_t rawSize;
1864        rawSize = getMaxRawSize(mCameraId);
1865        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1866                rawSize;
1867        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1868                CAM_STREAM_TYPE_RAW;
1869        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1870                CAM_QCOM_FEATURE_NONE;
1871        mStreamConfigInfo.num_streams++;
1872    }
1873    /* In HFR mode, if video stream is not added, create a dummy channel so that
1874     * ISP can create a batch mode even for preview only case. This channel is
1875     * never 'start'ed (no stream-on), it is only 'initialized'  */
1876    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1877            !m_bIsVideo) {
1878        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1879                mChannelHandle,
1880                mCameraHandle->ops, captureResultCb,
1881                &gCamCapability[mCameraId]->padding_info,
1882                this,
1883                &mDummyBatchStream,
1884                CAM_STREAM_TYPE_VIDEO,
1885                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1886                mMetadataChannel);
1887        if (NULL == mDummyBatchChannel) {
1888            ALOGE("%s: creation of mDummyBatchChannel failed."
1889                    "Preview will use non-hfr sensor mode ", __func__);
1890        }
1891    }
1892    if (mDummyBatchChannel) {
1893        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1894                mDummyBatchStream.width;
1895        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1896                mDummyBatchStream.height;
1897        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1898                CAM_STREAM_TYPE_VIDEO;
1899        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1900                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1901        mStreamConfigInfo.num_streams++;
1902    }
1903
1904    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1905    mStreamConfigInfo.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
1906
1907    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1908    for (pendingRequestIterator i = mPendingRequestsList.begin();
1909            i != mPendingRequestsList.end();) {
1910        i = erasePendingRequest(i);
1911    }
1912    mPendingFrameDropList.clear();
1913    // Initialize/Reset the pending buffers list
1914    mPendingBuffersMap.num_buffers = 0;
1915    mPendingBuffersMap.mPendingBufferList.clear();
1916    mPendingReprocessResultList.clear();
1917
1918    mFirstRequest = true;
1919    mCurJpegMeta.clear();
1920    //Get min frame duration for this streams configuration
1921    deriveMinFrameDuration();
1922
1923    /* Turn on video hint only if video stream is configured */
1924
1925    pthread_mutex_unlock(&mMutex);
1926
1927    return rc;
1928}
1929
1930/*===========================================================================
1931 * FUNCTION   : validateCaptureRequest
1932 *
1933 * DESCRIPTION: validate a capture request from camera service
1934 *
1935 * PARAMETERS :
1936 *   @request : request from framework to process
1937 *
1938 * RETURN     :
1939 *
1940 *==========================================================================*/
1941int QCamera3HardwareInterface::validateCaptureRequest(
1942                    camera3_capture_request_t *request)
1943{
1944    ssize_t idx = 0;
1945    const camera3_stream_buffer_t *b;
1946    CameraMetadata meta;
1947
1948    /* Sanity check the request */
1949    if (request == NULL) {
1950        ALOGE("%s: NULL capture request", __func__);
1951        return BAD_VALUE;
1952    }
1953
1954    if (request->settings == NULL && mFirstRequest) {
1955        /*settings cannot be null for the first request*/
1956        return BAD_VALUE;
1957    }
1958
1959    uint32_t frameNumber = request->frame_number;
1960    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1961        ALOGE("%s: Request %d: No output buffers provided!",
1962                __FUNCTION__, frameNumber);
1963        return BAD_VALUE;
1964    }
1965    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
1966        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
1967                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
1968        return BAD_VALUE;
1969    }
1970    if (request->input_buffer != NULL) {
1971        b = request->input_buffer;
1972        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1973            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1974                    __func__, frameNumber, (long)idx);
1975            return BAD_VALUE;
1976        }
1977        if (b->release_fence != -1) {
1978            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1979                    __func__, frameNumber, (long)idx);
1980            return BAD_VALUE;
1981        }
1982        if (b->buffer == NULL) {
1983            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1984                    __func__, frameNumber, (long)idx);
1985            return BAD_VALUE;
1986        }
1987    }
1988
1989    // Validate all buffers
1990    b = request->output_buffers;
1991    do {
1992        QCamera3ProcessingChannel *channel =
1993                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
1994        if (channel == NULL) {
1995            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1996                    __func__, frameNumber, (long)idx);
1997            return BAD_VALUE;
1998        }
1999        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2000            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2001                    __func__, frameNumber, (long)idx);
2002            return BAD_VALUE;
2003        }
2004        if (b->release_fence != -1) {
2005            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2006                    __func__, frameNumber, (long)idx);
2007            return BAD_VALUE;
2008        }
2009        if (b->buffer == NULL) {
2010            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2011                    __func__, frameNumber, (long)idx);
2012            return BAD_VALUE;
2013        }
2014        if (*(b->buffer) == NULL) {
2015            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2016                    __func__, frameNumber, (long)idx);
2017            return BAD_VALUE;
2018        }
2019        idx++;
2020        b = request->output_buffers + idx;
2021    } while (idx < (ssize_t)request->num_output_buffers);
2022
2023    return NO_ERROR;
2024}
2025
2026/*===========================================================================
2027 * FUNCTION   : deriveMinFrameDuration
2028 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2030 *              on currently configured streams.
2031 *
2032 * PARAMETERS : NONE
2033 *
2034 * RETURN     : NONE
2035 *
2036 *==========================================================================*/
2037void QCamera3HardwareInterface::deriveMinFrameDuration()
2038{
2039    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2040
2041    maxJpegDim = 0;
2042    maxProcessedDim = 0;
2043    maxRawDim = 0;
2044
2045    // Figure out maximum jpeg, processed, and raw dimensions
2046    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2047        it != mStreamInfo.end(); it++) {
2048
2049        // Input stream doesn't have valid stream_type
2050        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2051            continue;
2052
2053        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2054        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2055            if (dimension > maxJpegDim)
2056                maxJpegDim = dimension;
2057        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2058                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2059                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2060            if (dimension > maxRawDim)
2061                maxRawDim = dimension;
2062        } else {
2063            if (dimension > maxProcessedDim)
2064                maxProcessedDim = dimension;
2065        }
2066    }
2067
2068    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2069            MAX_SIZES_CNT);
2070
2071    //Assume all jpeg dimensions are in processed dimensions.
2072    if (maxJpegDim > maxProcessedDim)
2073        maxProcessedDim = maxJpegDim;
2074    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2075    if (maxProcessedDim > maxRawDim) {
2076        maxRawDim = INT32_MAX;
2077
2078        for (size_t i = 0; i < count; i++) {
2079            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2080                    gCamCapability[mCameraId]->raw_dim[i].height;
2081            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2082                maxRawDim = dimension;
2083        }
2084    }
2085
2086    //Find minimum durations for processed, jpeg, and raw
2087    for (size_t i = 0; i < count; i++) {
2088        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2089                gCamCapability[mCameraId]->raw_dim[i].height) {
2090            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2091            break;
2092        }
2093    }
2094    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2095    for (size_t i = 0; i < count; i++) {
2096        if (maxProcessedDim ==
2097                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2098                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2099            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2100            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2101            break;
2102        }
2103    }
2104}
2105
2106/*===========================================================================
2107 * FUNCTION   : getMinFrameDuration
2108 *
 * DESCRIPTION: get minimum frame duration based on the currently derived
 *              per-class minimum frame durations and the current request
 *              configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2115 *
2116 *==========================================================================*/
2117int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2118{
2119    bool hasJpegStream = false;
2120    bool hasRawStream = false;
2121    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2122        const camera3_stream_t *stream = request->output_buffers[i].stream;
2123        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2124            hasJpegStream = true;
2125        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2126                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2127                stream->format == HAL_PIXEL_FORMAT_RAW16)
2128            hasRawStream = true;
2129    }
2130
2131    if (!hasJpegStream)
2132        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2133    else
2134        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2135}
2136
2137/*===========================================================================
2138 * FUNCTION   : handlePendingReprocResults
2139 *
2140 * DESCRIPTION: check and notify on any pending reprocess results
2141 *
2142 * PARAMETERS :
2143 *   @frame_number   : Pending request frame number
2144 *
2145 * RETURN     : int32_t type of status
2146 *              NO_ERROR  -- success
 *              non-zero failure code
2148 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a pending reprocess result whose frame number matches the
    // one just completed; at most one entry is handled per call (break
    // after the first match).
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the (delayed) shutter/notify message to the framework
            // before the capture result, as HAL3 ordering requires.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Build the capture result from the buffered reprocess
                    // output (j->buffer) plus the original request's input
                    // buffer and settings, then send it to the framework.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // The request is fully answered; drop it from the
                    // pending list. erasePendingRequest returns the next
                    // iterator, but we break out immediately so it is
                    // safely ignored.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Remove the consumed reprocess result; break before the loop
            // advances so the erased iterator is never dereferenced.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2187
2188/*===========================================================================
2189 * FUNCTION   : handleBatchMetadata
2190 *
2191 * DESCRIPTION: Handles metadata buffer callback in batch mode
2192 *
2193 * PARAMETERS : @metadata_buf: metadata buffer
2194 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2195 *                 the meta buf in this method
2196 *
2197 * RETURN     :
2198 *
2199 *==========================================================================*/
2200void QCamera3HardwareInterface::handleBatchMetadata(
2201        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2202{
2203    ATRACE_CALL();
2204
2205    if (NULL == metadata_buf) {
2206        ALOGE("%s: metadata_buf is NULL", __func__);
2207        return;
2208    }
2209    /* In batch mode, the metdata will contain the frame number and timestamp of
2210     * the last frame in the batch. Eg: a batch containing buffers from request
2211     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2212     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2213     * multiple process_capture_results */
2214    metadata_buffer_t *metadata =
2215            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2216    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2217    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2218    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2219    uint32_t frame_number = 0, urgent_frame_number = 0;
2220    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2221    bool invalid_metadata = false;
2222    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2223    size_t loopCount = 1;
2224
2225    int32_t *p_frame_number_valid =
2226            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2227    uint32_t *p_frame_number =
2228            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2229    int64_t *p_capture_time =
2230            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2231    int32_t *p_urgent_frame_number_valid =
2232            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2233    uint32_t *p_urgent_frame_number =
2234            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2235
2236    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2237            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2238            (NULL == p_urgent_frame_number)) {
2239        ALOGE("%s: Invalid metadata", __func__);
2240        invalid_metadata = true;
2241    } else {
2242        frame_number_valid = *p_frame_number_valid;
2243        last_frame_number = *p_frame_number;
2244        last_frame_capture_time = *p_capture_time;
2245        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2246        last_urgent_frame_number = *p_urgent_frame_number;
2247    }
2248
2249    /* In batchmode, when no video buffers are requested, set_parms are sent
2250     * for every capture_request. The difference between consecutive urgent
2251     * frame numbers and frame numbers should be used to interpolate the
2252     * corresponding frame numbers and time stamps */
2253    pthread_mutex_lock(&mMutex);
2254    if (urgent_frame_number_valid) {
2255        first_urgent_frame_number =
2256                mPendingBatchMap.valueFor(last_urgent_frame_number);
2257        urgentFrameNumDiff = last_urgent_frame_number + 1 -
2258                first_urgent_frame_number;
2259
2260        CDBG("%s: urgent_frm: valid: %d frm_num: %d - %d",
2261                __func__, urgent_frame_number_valid,
2262                first_urgent_frame_number, last_urgent_frame_number);
2263    }
2264
2265    if (frame_number_valid) {
2266        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2267        frameNumDiff = last_frame_number + 1 -
2268                first_frame_number;
2269        mPendingBatchMap.removeItem(last_frame_number);
2270
2271        CDBG("%s:        frm: valid: %d frm_num: %d - %d",
2272                __func__, frame_number_valid,
2273                first_frame_number, last_frame_number);
2274
2275    }
2276    pthread_mutex_unlock(&mMutex);
2277
2278    if (urgent_frame_number_valid || frame_number_valid) {
2279        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2280        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2281            ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2282                    __func__, urgentFrameNumDiff, last_urgent_frame_number);
2283        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2284            ALOGE("%s: frameNumDiff: %d frameNum: %d",
2285                    __func__, frameNumDiff, last_frame_number);
2286    }
2287
2288    for (size_t i = 0; i < loopCount; i++) {
2289        /* handleMetadataWithLock is called even for invalid_metadata for
2290         * pipeline depth calculation */
2291        if (!invalid_metadata) {
2292            /* Infer frame number. Batch metadata contains frame number of the
2293             * last frame */
2294            if (urgent_frame_number_valid) {
2295                if (i < urgentFrameNumDiff) {
2296                    urgent_frame_number =
2297                            first_urgent_frame_number + i;
2298                    CDBG("%s: inferred urgent frame_number: %d",
2299                            __func__, urgent_frame_number);
2300                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2301                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2302                } else {
2303                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2304                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2305                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2306                }
2307            }
2308
2309            /* Infer frame number. Batch metadata contains frame number of the
2310             * last frame */
2311            if (frame_number_valid) {
2312                if (i < frameNumDiff) {
2313                    frame_number = first_frame_number + i;
2314                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2315                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2316                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2317                } else {
2318                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2319                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2320                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2321                }
2322            }
2323
2324            if (last_frame_capture_time) {
2325                //Infer timestamp
2326                first_frame_capture_time = last_frame_capture_time -
2327                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2328                capture_time =
2329                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2330                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2331                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2332                CDBG("%s: batch capture_time: %lld, capture_time: %lld",
2333                        __func__, last_frame_capture_time, capture_time);
2334            }
2335        }
2336        pthread_mutex_lock(&mMutex);
2337        handleMetadataWithLock(metadata_buf,
2338                false /* free_and_bufdone_meta_buf */);
2339        pthread_mutex_unlock(&mMutex);
2340    }
2341
2342done_batch_metadata:
2343    /* BufDone metadata buffer */
2344    if (free_and_bufdone_meta_buf) {
2345        mMetadataChannel->bufDone(metadata_buf);
2346        free(metadata_buf);
2347    }
2348}
2349
2350/*===========================================================================
2351 * FUNCTION   : handleMetadataWithLock
2352 *
2353 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2354 *
2355 * PARAMETERS : @metadata_buf: metadata buffer
2356 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2357 *                 the meta buf in this method
2358 *
2359 * RETURN     :
2360 *
2361 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    // Dispatches one metadata buffer to the framework while mMutex is held by
    // the caller: sends the urgent (3A) partial result, then flushes every
    // pending request whose frame number is <= this metadata's frame number,
    // issuing shutter notifies, error notifies for dropped stream buffers, and
    // final capture results. Returns nothing; on every exit path the pipeline
    // depth of all still-pending requests is incremented.
    ATRACE_CALL();

    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop stays in scope for the flush loop below.
    // NOTE(review): this CDBG dereferences p_frame_number_valid/p_frame_number
    // BEFORE the NULL check that follows — confirm CDBG is compiled out in
    // production, or move this after the validity check.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // An older non-reprocess request that never got its urgent
            // metadata means the HAL skipped a partial result — log it.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
                   QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                   // Match this request's stream against the dropped-stream IDs
                   // reported in the metadata.
                   for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                       if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                           ALOGW("%s: Start of reporting error frame#=%u, streamID=%u",
                                   __func__, i->frame_number, streamID);
                           notify_msg.type = CAMERA3_MSG_ERROR;
                           notify_msg.message.error.frame_number = i->frame_number;
                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                           notify_msg.message.error.error_stream = j->stream;
                           mCallbackOps->notify(mCallbackOps, &notify_msg);
                           ALOGW("%s: End of reporting error frame#=%u, streamID=%u",
                                  __func__, i->frame_number, streamID);
                           PendingFrameDropInfo PendingFrameDrop;
                           PendingFrameDrop.frame_number=i->frame_number;
                           PendingFrameDrop.stream_ID = streamID;
                           // Add the Frame drop info to mPendingFrameDropList
                           // so the buffer is later marked STATUS_ERROR when
                           // it is returned.
                           mPendingFrameDropList.push_back(PendingFrameDrop);
                      }
                   }
               } else {
                   ALOGE("%s: JPEG buffer dropped for frame number %d",
                           __func__, i->frame_number);
               }
            }
        }

        //TODO: batch handling for dropped metadata

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            // Estimate the missed frame's timestamp by stepping back ~33ms per
            // frame of distance from the current urgent frame number.
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time -
                        (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
            if (i->input_buffer) {
                i->partial_result_cnt++; //input request will not have urgent metadata
                CameraMetadata settings;
                if(i->settings) {
                    settings = i->settings;
                    // Reprocess requests carry the original capture's timestamp
                    // in their settings; prefer it over the estimate above.
                    if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                        nsecs_t input_capture_time =
                                settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                        notify_msg.message.shutter.timestamp = (uint64_t)input_capture_time;
                    } else {
                        ALOGE("%s: No timestamp in input settings! Using current one.",
                                __func__);
                    }
                } else {
                    ALOGE("%s: Input settings missing!", __func__);
                }
                result.result = settings.release();
                result.partial_result = i->partial_result_cnt;
                CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            } else {
                mPendingLiveRequest--;
                // No real metadata for this frame — synthesize a minimal
                // result with just timestamp and request id.
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &i->timestamp, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            }
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->timestamp = (nsecs_t)notify_msg.message.shutter.timestamp;
            CDBG("%s: Support notification !!!! notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Hand the metadata buffer to the channel; ownership
                    // transfers, so it is NOT freed below (see !internalPproc).
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count buffers already filled for this request; they ride along with
        // this metadata result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): operator new[] throws rather than returning NULL
            // here — this check only fires with a nothrow allocator.
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer STATUS_ERROR if it was reported dropped
                    // earlier, and clear its drop record.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the global pending-buffers map.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        CDBG("%s: Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                    __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                        __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = erasePendingRequest(i);

        // A reprocess result for the next frame may have been deferred until
        // this (earlier) request completed — deliver it now if so.
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every request still pending has now seen one more metadata callback —
    // bump its reported pipeline depth.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
    unblockRequestIfNecessary();

}
2699
2700/*===========================================================================
2701 * FUNCTION   : hdrPlusPerfLock
2702 *
2703 * DESCRIPTION: perf lock for HDR+ using custom intent
2704 *
2705 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2706 *
2707 * RETURN     : None
2708 *
2709 *==========================================================================*/
2710void QCamera3HardwareInterface::hdrPlusPerfLock(
2711        mm_camera_super_buf_t *metadata_buf)
2712{
2713    if (NULL == metadata_buf) {
2714        ALOGE("%s: metadata_buf is NULL", __func__);
2715        return;
2716    }
2717    metadata_buffer_t *metadata =
2718            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2719    int32_t *p_frame_number_valid =
2720            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2721    uint32_t *p_frame_number =
2722            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2723
2724    //acquire perf lock for 5 sec after the last HDR frame is captured
2725    if (*p_frame_number_valid) {
2726        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2727            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2728        }
2729    }
2730
2731    //release lock after perf lock timer is expired. If lock is already released,
2732    //isTimerReset returns false
2733    if (m_perfLock.isTimerReset()) {
2734        mLastCustIntentFrmNum = -1;
2735        m_perfLock.lock_rel_timed();
2736    }
2737}
2738/*===========================================================================
2739 * FUNCTION   : handleBufferWithLock
2740 *
2741 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2742 *
2743 * PARAMETERS : @buffer: image buffer for the callback
2744 *              @frame_number: frame number of the image buffer
2745 *
2746 * RETURN     :
2747 *
2748 *==========================================================================*/
2749void QCamera3HardwareInterface::handleBufferWithLock(
2750    camera3_stream_buffer_t *buffer, uint32_t frame_number)
2751{
2752    ATRACE_CALL();
2753    // If the frame number doesn't exist in the pending request list,
2754    // directly send the buffer to the frameworks, and update pending buffers map
2755    // Otherwise, book-keep the buffer.
2756    pendingRequestIterator i = mPendingRequestsList.begin();
2757    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2758        i++;
2759    }
2760    if (i == mPendingRequestsList.end()) {
2761        // Verify all pending requests frame_numbers are greater
2762        for (pendingRequestIterator j = mPendingRequestsList.begin();
2763                j != mPendingRequestsList.end(); j++) {
2764            if (j->frame_number < frame_number) {
2765                ALOGE("%s: Error: pending frame number %d is smaller than %d",
2766                        __func__, j->frame_number, frame_number);
2767            }
2768        }
2769        camera3_capture_result_t result;
2770        memset(&result, 0, sizeof(camera3_capture_result_t));
2771        result.result = NULL;
2772        result.frame_number = frame_number;
2773        result.num_output_buffers = 1;
2774        result.partial_result = 0;
2775        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2776                m != mPendingFrameDropList.end(); m++) {
2777            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
2778            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2779            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
2780                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2781                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
2782                        __func__, frame_number, streamID);
2783                m = mPendingFrameDropList.erase(m);
2784                break;
2785            }
2786        }
2787        result.output_buffers = buffer;
2788        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
2789                __func__, frame_number, buffer->buffer);
2790
2791        for (List<PendingBufferInfo>::iterator k =
2792                mPendingBuffersMap.mPendingBufferList.begin();
2793                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2794            if (k->buffer == buffer->buffer) {
2795                CDBG("%s: Found Frame buffer, take it out from list",
2796                        __func__);
2797
2798                mPendingBuffersMap.num_buffers--;
2799                k = mPendingBuffersMap.mPendingBufferList.erase(k);
2800                break;
2801            }
2802        }
2803        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2804            __func__, mPendingBuffersMap.num_buffers);
2805
2806        mCallbackOps->process_capture_result(mCallbackOps, &result);
2807    } else {
2808        if (i->input_buffer) {
2809            CameraMetadata settings;
2810            camera3_notify_msg_t notify_msg;
2811            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2812            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2813            if(i->settings) {
2814                settings = i->settings;
2815                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2816                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2817                } else {
2818                    ALOGE("%s: No timestamp in input settings! Using current one.",
2819                            __func__);
2820                }
2821            } else {
2822                ALOGE("%s: Input settings missing!", __func__);
2823            }
2824
2825            notify_msg.type = CAMERA3_MSG_SHUTTER;
2826            notify_msg.message.shutter.frame_number = frame_number;
2827            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2828
2829            if (i->input_buffer->release_fence != -1) {
2830               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
2831               close(i->input_buffer->release_fence);
2832               if (rc != OK) {
2833               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2834               }
2835            }
2836
2837            for (List<PendingBufferInfo>::iterator k =
2838                    mPendingBuffersMap.mPendingBufferList.begin();
2839                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2840                if (k->buffer == buffer->buffer) {
2841                    CDBG("%s: Found Frame buffer, take it out from list",
2842                            __func__);
2843
2844                    mPendingBuffersMap.num_buffers--;
2845                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
2846                    break;
2847                }
2848            }
2849            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2850                __func__, mPendingBuffersMap.num_buffers);
2851
2852            bool notifyNow = true;
2853            for (pendingRequestIterator j = mPendingRequestsList.begin();
2854                    j != mPendingRequestsList.end(); j++) {
2855                if (j->frame_number < frame_number) {
2856                    notifyNow = false;
2857                    break;
2858                }
2859            }
2860
2861            if (notifyNow) {
2862                camera3_capture_result result;
2863                memset(&result, 0, sizeof(camera3_capture_result));
2864                result.frame_number = frame_number;
2865                result.result = i->settings;
2866                result.input_buffer = i->input_buffer;
2867                result.num_output_buffers = 1;
2868                result.output_buffers = buffer;
2869                result.partial_result = PARTIAL_RESULT_COUNT;
2870
2871                mCallbackOps->notify(mCallbackOps, &notify_msg);
2872                mCallbackOps->process_capture_result(mCallbackOps, &result);
2873                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
2874                i = erasePendingRequest(i);
2875            } else {
2876                // Cache reprocess result for later
2877                PendingReprocessResult pendingResult;
2878                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
2879                pendingResult.notify_msg = notify_msg;
2880                pendingResult.buffer = *buffer;
2881                pendingResult.frame_number = frame_number;
2882                mPendingReprocessResultList.push_back(pendingResult);
2883                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
2884            }
2885        } else {
2886            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2887                j != i->buffers.end(); j++) {
2888                if (j->stream == buffer->stream) {
2889                    if (j->buffer != NULL) {
2890                        ALOGE("%s: Error: buffer is already set", __func__);
2891                    } else {
2892                        j->buffer = (camera3_stream_buffer_t *)malloc(
2893                            sizeof(camera3_stream_buffer_t));
2894                        *(j->buffer) = *buffer;
2895                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
2896                            __func__, buffer, frame_number);
2897                    }
2898                }
2899            }
2900        }
2901    }
2902}
2903
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request: wake one waiter blocked on mRequestCond
   // (processCaptureRequest waits on this condition while too many requests
   // are in flight). Per the header note above, the caller already holds
   // mMutex, the mutex paired with this condition variable, so signaling here
   // is race-free.
   pthread_cond_signal(&mRequestCond);
}
2920
2921
2922/*===========================================================================
2923 * FUNCTION   : processCaptureRequest
2924 *
2925 * DESCRIPTION: process a capture request from camera service
2926 *
2927 * PARAMETERS :
2928 *   @request : request from framework to process
2929 *
2930 * RETURN     :
2931 *
2932 *==========================================================================*/
2933int QCamera3HardwareInterface::processCaptureRequest(
2934                    camera3_capture_request_t *request)
2935{
2936    ATRACE_CALL();
2937    int rc = NO_ERROR;
2938    int32_t request_id;
2939    CameraMetadata meta;
2940    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
2941    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
2942    bool isVidBufRequested = false;
2943    camera3_stream_buffer_t *pInputBuffer = NULL;
2944
2945    pthread_mutex_lock(&mMutex);
2946
2947    rc = validateCaptureRequest(request);
2948    if (rc != NO_ERROR) {
2949        ALOGE("%s: incoming request is not valid", __func__);
2950        pthread_mutex_unlock(&mMutex);
2951        return rc;
2952    }
2953
2954    meta = request->settings;
2955
2956    // For first capture request, send capture intent, and
2957    // stream on all streams
2958    if (mFirstRequest) {
2959        // send an unconfigure to the backend so that the isp
2960        // resources are deallocated
2961        if (!mFirstConfiguration) {
2962            cam_stream_size_info_t stream_config_info;
2963            int32_t hal_version = CAM_HAL_V3;
2964            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
2965            stream_config_info.buffer_info.min_buffers =
2966                    MIN_INFLIGHT_REQUESTS;
2967            stream_config_info.buffer_info.max_buffers =
2968                    MAX_INFLIGHT_REQUESTS;
2969            clear_metadata_buffer(mParameters);
2970            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2971                    CAM_INTF_PARM_HAL_VERSION, hal_version);
2972            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2973                    CAM_INTF_META_STREAM_INFO, stream_config_info);
2974            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2975                    mParameters);
2976            if (rc < 0) {
2977                ALOGE("%s: set_parms for unconfigure failed", __func__);
2978                pthread_mutex_unlock(&mMutex);
2979                return rc;
2980            }
2981        }
2982        m_perfLock.lock_acq();
2983        /* get eis information for stream configuration */
2984        cam_is_type_t is_type;
2985        char is_type_value[PROPERTY_VALUE_MAX];
2986        property_get("persist.camera.is_type", is_type_value, "0");
2987        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2988
2989        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2990            int32_t hal_version = CAM_HAL_V3;
2991            uint8_t captureIntent =
2992                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2993            mCaptureIntent = captureIntent;
2994            clear_metadata_buffer(mParameters);
2995            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
2996            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
2997        }
2998
2999        //If EIS is enabled, turn it on for video
3000        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3001        int32_t vsMode;
3002        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3003        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3004            rc = BAD_VALUE;
3005        }
3006
3007        //IS type will be 0 unless EIS is supported. If EIS is supported
3008        //it could either be 1 or 4 depending on the stream and video size
3009        if (setEis) {
3010            if (!m_bEisSupportedSize) {
3011                is_type = IS_TYPE_DIS;
3012            } else {
3013                is_type = IS_TYPE_EIS_2_0;
3014            }
3015            mStreamConfigInfo.is_type = is_type;
3016        } else {
3017            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3018        }
3019
3020        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3021                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3022        int32_t tintless_value = 1;
3023        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3024                CAM_INTF_PARM_TINTLESS, tintless_value);
3025        //Disable CDS for HFR mode and if mPprocBypass = true.
3026        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3027        //after every configure_stream
3028        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3029                (m_bIsVideo)) {
3030            int32_t cds = CAM_CDS_MODE_OFF;
3031            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3032                    CAM_INTF_PARM_CDS_MODE, cds))
3033                ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3034
3035        }
3036        setMobicat();
3037
3038        /* Set fps and hfr mode while sending meta stream info so that sensor
3039         * can configure appropriate streaming mode */
3040        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3041        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3042            rc = setHalFpsRange(meta, mParameters);
3043            if (rc != NO_ERROR) {
3044                ALOGE("%s: setHalFpsRange failed", __func__);
3045            }
3046        }
3047        if (meta.exists(ANDROID_CONTROL_MODE)) {
3048            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3049            rc = extractSceneMode(meta, metaMode, mParameters);
3050            if (rc != NO_ERROR) {
3051                ALOGE("%s: extractSceneMode failed", __func__);
3052            }
3053        }
3054
3055        //TODO: validate the arguments, HSV scenemode should have only the
3056        //advertised fps ranges
3057
3058        /*set the capture intent, hal version, tintless, stream info,
3059         *and disenable parameters to the backend*/
3060        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3061        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3062                    mParameters);
3063
3064        cam_dimension_t sensor_dim;
3065        memset(&sensor_dim, 0, sizeof(sensor_dim));
3066        rc = getSensorOutputSize(sensor_dim);
3067        if (rc != NO_ERROR) {
3068            ALOGE("%s: Failed to get sensor output size", __func__);
3069            pthread_mutex_unlock(&mMutex);
3070            goto error_exit;
3071        }
3072
3073        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3074                gCamCapability[mCameraId]->active_array_size.height,
3075                sensor_dim.width, sensor_dim.height);
3076
3077        /* Set batchmode before initializing channel. Since registerBuffer
3078         * internally initializes some of the channels, better set batchmode
3079         * even before first register buffer */
3080        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3081            it != mStreamInfo.end(); it++) {
3082            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3083            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3084                    && mBatchSize) {
3085                rc = channel->setBatchSize(mBatchSize);
3086                //Disable per frame map unmap for HFR/batchmode case
3087                rc |= channel->setPerFrameMapUnmap(false);
3088                if (NO_ERROR != rc) {
3089                    ALOGE("%s : Channel init failed %d", __func__, rc);
3090                    pthread_mutex_unlock(&mMutex);
3091                    goto error_exit;
3092                }
3093            }
3094        }
3095
3096        //First initialize all streams
3097        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3098            it != mStreamInfo.end(); it++) {
3099            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3100            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3101               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3102               setEis)
3103                rc = channel->initialize(is_type);
3104            else {
3105                rc = channel->initialize(IS_TYPE_NONE);
3106            }
3107            if (NO_ERROR != rc) {
3108                ALOGE("%s : Channel initialization failed %d", __func__, rc);
3109                pthread_mutex_unlock(&mMutex);
3110                goto error_exit;
3111            }
3112        }
3113
3114        if (mRawDumpChannel) {
3115            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3116            if (rc != NO_ERROR) {
3117                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3118                pthread_mutex_unlock(&mMutex);
3119                goto error_exit;
3120            }
3121        }
3122        if (mSupportChannel) {
3123            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3124            if (rc < 0) {
3125                ALOGE("%s: Support channel initialization failed", __func__);
3126                pthread_mutex_unlock(&mMutex);
3127                goto error_exit;
3128            }
3129        }
3130        if (mAnalysisChannel) {
3131            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3132            if (rc < 0) {
3133                ALOGE("%s: Analysis channel initialization failed", __func__);
3134                pthread_mutex_unlock(&mMutex);
3135                goto error_exit;
3136            }
3137        }
3138        if (mDummyBatchChannel) {
3139            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3140            if (rc < 0) {
3141                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3142                pthread_mutex_unlock(&mMutex);
3143                goto error_exit;
3144            }
3145            rc = mDummyBatchChannel->initialize(is_type);
3146            if (rc < 0) {
3147                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3148                pthread_mutex_unlock(&mMutex);
3149                goto error_exit;
3150            }
3151        }
3152
3153        // Set bundle info
3154        rc = setBundleInfo();
3155        if (rc < 0) {
3156            ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3157            pthread_mutex_unlock(&mMutex);
3158            goto error_exit;
3159        }
3160
3161        //Then start them.
3162        CDBG_HIGH("%s: Start META Channel", __func__);
3163        rc = mMetadataChannel->start();
3164        if (rc < 0) {
3165            ALOGE("%s: META channel start failed", __func__);
3166            pthread_mutex_unlock(&mMutex);
3167            goto error_exit;
3168        }
3169
3170        if (mAnalysisChannel) {
3171            rc = mAnalysisChannel->start();
3172            if (rc < 0) {
3173                ALOGE("%s: Analysis channel start failed", __func__);
3174                mMetadataChannel->stop();
3175                pthread_mutex_unlock(&mMutex);
3176                goto error_exit;
3177            }
3178        }
3179
3180        if (mSupportChannel) {
3181            rc = mSupportChannel->start();
3182            if (rc < 0) {
3183                ALOGE("%s: Support channel start failed", __func__);
3184                mMetadataChannel->stop();
3185                /* Although support and analysis are mutually exclusive today
3186                   adding it in anycase for future proofing */
3187                if (mAnalysisChannel) {
3188                    mAnalysisChannel->stop();
3189                }
3190                pthread_mutex_unlock(&mMutex);
3191                goto error_exit;
3192            }
3193        }
3194        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3195            it != mStreamInfo.end(); it++) {
3196            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3197            CDBG_HIGH("%s: Start Processing Channel mask=%d",
3198                    __func__, channel->getStreamTypeMask());
3199            rc = channel->start();
3200            if (rc < 0) {
3201                ALOGE("%s: channel start failed", __func__);
3202                pthread_mutex_unlock(&mMutex);
3203                goto error_exit;
3204            }
3205        }
3206
3207        if (mRawDumpChannel) {
3208            CDBG("%s: Starting raw dump stream",__func__);
3209            rc = mRawDumpChannel->start();
3210            if (rc != NO_ERROR) {
3211                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3212                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3213                      it != mStreamInfo.end(); it++) {
3214                    QCamera3Channel *channel =
3215                        (QCamera3Channel *)(*it)->stream->priv;
3216                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3217                        channel->getStreamTypeMask());
3218                    channel->stop();
3219                }
3220                if (mSupportChannel)
3221                    mSupportChannel->stop();
3222                if (mAnalysisChannel) {
3223                    mAnalysisChannel->stop();
3224                }
3225                mMetadataChannel->stop();
3226                pthread_mutex_unlock(&mMutex);
3227                goto error_exit;
3228            }
3229        }
3230
3231        if (mChannelHandle) {
3232
3233            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3234                    mChannelHandle);
3235            if (rc != NO_ERROR) {
3236                ALOGE("%s: start_channel failed %d", __func__, rc);
3237                pthread_mutex_unlock(&mMutex);
3238                goto error_exit;
3239            }
3240        }
3241
3242
3243        goto no_error;
3244error_exit:
3245        m_perfLock.lock_rel();
3246        return rc;
3247no_error:
3248        m_perfLock.lock_rel();
3249
3250        mWokenUpByDaemon = false;
3251        mPendingLiveRequest = 0;
3252        mFirstConfiguration = false;
3253        enablePowerHint();
3254    }
3255
3256    uint32_t frameNumber = request->frame_number;
3257    cam_stream_ID_t streamID;
3258
3259    if (meta.exists(ANDROID_REQUEST_ID)) {
3260        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3261        mCurrentRequestId = request_id;
3262        CDBG("%s: Received request with id: %d",__func__, request_id);
3263    } else if (mFirstRequest || mCurrentRequestId == -1){
3264        ALOGE("%s: Unable to find request id field, \
3265                & no previous id available", __func__);
3266        pthread_mutex_unlock(&mMutex);
3267        return NAME_NOT_FOUND;
3268    } else {
3269        CDBG("%s: Re-using old request id", __func__);
3270        request_id = mCurrentRequestId;
3271    }
3272
3273    CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3274                                    __func__, __LINE__,
3275                                    request->num_output_buffers,
3276                                    request->input_buffer,
3277                                    frameNumber);
3278    // Acquire all request buffers first
3279    streamID.num_streams = 0;
3280    int blob_request = 0;
3281    uint32_t snapshotStreamId = 0;
3282    for (size_t i = 0; i < request->num_output_buffers; i++) {
3283        const camera3_stream_buffer_t& output = request->output_buffers[i];
3284        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3285
3286        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3287            //Call function to store local copy of jpeg data for encode params.
3288            blob_request = 1;
3289            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3290        }
3291
3292        if (output.acquire_fence != -1) {
3293           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3294           close(output.acquire_fence);
3295           if (rc != OK) {
3296              ALOGE("%s: sync wait failed %d", __func__, rc);
3297              pthread_mutex_unlock(&mMutex);
3298              return rc;
3299           }
3300        }
3301
3302        streamID.streamID[streamID.num_streams] =
3303            channel->getStreamID(channel->getStreamTypeMask());
3304        streamID.num_streams++;
3305
3306        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3307            isVidBufRequested = true;
3308        }
3309    }
3310
3311    if (blob_request && mRawDumpChannel) {
3312        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3313        streamID.streamID[streamID.num_streams] =
3314            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3315        streamID.num_streams++;
3316    }
3317
3318    if(request->input_buffer == NULL) {
3319        /* Parse the settings:
3320         * - For every request in NORMAL MODE
3321         * - For every request in HFR mode during preview only case
3322         * - For first request of every batch in HFR mode during video
3323         * recording. In batchmode the same settings except frame number is
3324         * repeated in each request of the batch.
3325         */
3326        if (!mBatchSize ||
3327           (mBatchSize && !isVidBufRequested) ||
3328           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3329            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3330            if (rc < 0) {
3331                ALOGE("%s: fail to set frame parameters", __func__);
3332                pthread_mutex_unlock(&mMutex);
3333                return rc;
3334            }
3335        }
3336        /* For batchMode HFR, setFrameParameters is not called for every
3337         * request. But only frame number of the latest request is parsed.
3338         * Keep track of first and last frame numbers in a batch so that
3339         * metadata for the frame numbers of batch can be duplicated in
3340         * handleBatchMetadta */
3341        if (mBatchSize) {
3342            if (!mToBeQueuedVidBufs) {
3343                //start of the batch
3344                mFirstFrameNumberInBatch = request->frame_number;
3345            }
3346            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3347                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3348                ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3349                return BAD_VALUE;
3350            }
3351        }
3352        if (mNeedSensorRestart) {
3353            /* Unlock the mutex as restartSensor waits on the channels to be
3354             * stopped, which in turn calls stream callback functions -
3355             * handleBufferWithLock and handleMetadataWithLock */
3356            pthread_mutex_unlock(&mMutex);
3357            rc = dynamicUpdateMetaStreamInfo();
3358            if (rc != NO_ERROR) {
3359                ALOGE("%s: Restarting the sensor failed", __func__);
3360                return BAD_VALUE;
3361            }
3362            mNeedSensorRestart = false;
3363            pthread_mutex_lock(&mMutex);
3364        }
3365    } else {
3366
3367        if (request->input_buffer->acquire_fence != -1) {
3368           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3369           close(request->input_buffer->acquire_fence);
3370           if (rc != OK) {
3371              ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3372              pthread_mutex_unlock(&mMutex);
3373              return rc;
3374           }
3375        }
3376    }
3377
3378    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3379        mLastCustIntentFrmNum = frameNumber;
3380    }
3381    /* Update pending request list and pending buffers map */
3382    PendingRequestInfo pendingRequest;
3383    pendingRequestIterator latestRequest;
3384    pendingRequest.frame_number = frameNumber;
3385    pendingRequest.num_buffers = request->num_output_buffers;
3386    pendingRequest.request_id = request_id;
3387    pendingRequest.blob_request = blob_request;
3388    pendingRequest.timestamp = 0;
3389    pendingRequest.bUrgentReceived = 0;
3390    if (request->input_buffer) {
3391        pendingRequest.input_buffer =
3392                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3393        *(pendingRequest.input_buffer) = *(request->input_buffer);
3394        pInputBuffer = pendingRequest.input_buffer;
3395    } else {
3396       pendingRequest.input_buffer = NULL;
3397       pInputBuffer = NULL;
3398    }
3399
3400    pendingRequest.pipeline_depth = 0;
3401    pendingRequest.partial_result_cnt = 0;
3402    extractJpegMetadata(mCurJpegMeta, request);
3403    pendingRequest.jpegMetadata = mCurJpegMeta;
3404    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3405
3406    //extract capture intent
3407    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3408        mCaptureIntent =
3409                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3410    }
3411    pendingRequest.capture_intent = mCaptureIntent;
3412
3413    for (size_t i = 0; i < request->num_output_buffers; i++) {
3414        RequestedBufferInfo requestedBuf;
3415        memset(&requestedBuf, 0, sizeof(requestedBuf));
3416        requestedBuf.stream = request->output_buffers[i].stream;
3417        requestedBuf.buffer = NULL;
3418        pendingRequest.buffers.push_back(requestedBuf);
3419
3420        // Add to buffer handle the pending buffers list
3421        PendingBufferInfo bufferInfo;
3422        bufferInfo.frame_number = frameNumber;
3423        bufferInfo.buffer = request->output_buffers[i].buffer;
3424        bufferInfo.stream = request->output_buffers[i].stream;
3425        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3426        mPendingBuffersMap.num_buffers++;
3427        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3428        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3429                __func__, frameNumber, bufferInfo.buffer,
3430                channel->getStreamTypeMask(), bufferInfo.stream->format);
3431    }
3432    latestRequest = mPendingRequestsList.insert(
3433            mPendingRequestsList.end(), pendingRequest);
3434    if(mFlush) {
3435        pthread_mutex_unlock(&mMutex);
3436        return NO_ERROR;
3437    }
3438
3439    // Notify metadata channel we receive a request
3440    mMetadataChannel->request(NULL, frameNumber);
3441
3442    if(request->input_buffer != NULL){
3443        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3444        if (NO_ERROR != rc) {
3445            ALOGE("%s: fail to set reproc parameters", __func__);
3446            pthread_mutex_unlock(&mMutex);
3447            return rc;
3448        }
3449    }
3450
3451    // Call request on other streams
3452    uint32_t streams_need_metadata = 0;
3453    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3454    for (size_t i = 0; i < request->num_output_buffers; i++) {
3455        const camera3_stream_buffer_t& output = request->output_buffers[i];
3456        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3457
3458        if (channel == NULL) {
3459            ALOGE("%s: invalid channel pointer for stream", __func__);
3460            continue;
3461        }
3462
3463        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3464            if(request->input_buffer != NULL){
3465                rc = channel->request(output.buffer, frameNumber,
3466                        pInputBuffer, &mReprocMeta);
3467                if (rc < 0) {
3468                    ALOGE("%s: Fail to request on picture channel", __func__);
3469                    pthread_mutex_unlock(&mMutex);
3470                    return rc;
3471                }
3472            } else {
3473                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3474                        __LINE__, output.buffer, frameNumber);
3475                if (!request->settings) {
3476                    rc = channel->request(output.buffer, frameNumber,
3477                            NULL, mPrevParameters);
3478                } else {
3479                    rc = channel->request(output.buffer, frameNumber,
3480                            NULL, mParameters);
3481                }
3482                if (rc < 0) {
3483                    ALOGE("%s: Fail to request on picture channel", __func__);
3484                    pthread_mutex_unlock(&mMutex);
3485                    return rc;
3486                }
3487                pendingBufferIter->need_metadata = true;
3488                streams_need_metadata++;
3489            }
3490        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3491            bool needMetadata = false;
3492            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3493            rc = yuvChannel->request(output.buffer, frameNumber,
3494                    pInputBuffer,
3495                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3496            if (rc < 0) {
3497                ALOGE("%s: Fail to request on YUV channel", __func__);
3498                pthread_mutex_unlock(&mMutex);
3499                return rc;
3500            }
3501            pendingBufferIter->need_metadata = needMetadata;
3502            if (needMetadata)
3503                streams_need_metadata += 1;
3504            CDBG("%s: calling YUV channel request, need_metadata is %d",
3505                    __func__, needMetadata);
3506        } else {
3507            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3508                __LINE__, output.buffer, frameNumber);
3509            rc = channel->request(output.buffer, frameNumber);
3510            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3511                    && mBatchSize) {
3512                mToBeQueuedVidBufs++;
3513                if (mToBeQueuedVidBufs == mBatchSize) {
3514                    channel->queueBatchBuf();
3515                }
3516            }
3517            if (rc < 0) {
3518                ALOGE("%s: request failed", __func__);
3519                pthread_mutex_unlock(&mMutex);
3520                return rc;
3521            }
3522        }
3523        pendingBufferIter++;
3524    }
3525
3526    //If 2 streams have need_metadata set to true, fail the request, unless
3527    //we copy/reference count the metadata buffer
3528    if (streams_need_metadata > 1) {
3529        ALOGE("%s: not supporting request in which two streams requires"
3530                " 2 HAL metadata for reprocessing", __func__);
3531        pthread_mutex_unlock(&mMutex);
3532        return -EINVAL;
3533    }
3534
3535    if(request->input_buffer == NULL) {
3536        /* Set the parameters to backend:
3537         * - For every request in NORMAL MODE
3538         * - For every request in HFR mode during preview only case
3539         * - Once every batch in HFR mode during video recording
3540         */
3541        if (!mBatchSize ||
3542           (mBatchSize && !isVidBufRequested) ||
3543           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3544            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3545                    __func__, mBatchSize, isVidBufRequested,
3546                    mToBeQueuedVidBufs);
3547            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3548                    mParameters);
3549            if (rc < 0) {
3550                ALOGE("%s: set_parms failed", __func__);
3551            }
3552            /* reset to zero coz, the batch is queued */
3553            mToBeQueuedVidBufs = 0;
3554            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3555        }
3556        mPendingLiveRequest++;
3557    }
3558
3559    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3560
3561    mFirstRequest = false;
3562    // Added a timed condition wait
3563    struct timespec ts;
3564    uint8_t isValidTimeout = 1;
3565    rc = clock_gettime(CLOCK_REALTIME, &ts);
3566    if (rc < 0) {
3567      isValidTimeout = 0;
3568      ALOGE("%s: Error reading the real time clock!!", __func__);
3569    }
3570    else {
3571      // Make timeout as 5 sec for request to be honored
3572      ts.tv_sec += 5;
3573    }
3574    //Block on conditional variable
3575    if (mBatchSize) {
3576        /* For HFR, more buffers are dequeued upfront to improve the performance */
3577        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3578        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3579    }
3580    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3581        if (!isValidTimeout) {
3582            CDBG("%s: Blocking on conditional wait", __func__);
3583            pthread_cond_wait(&mRequestCond, &mMutex);
3584        }
3585        else {
3586            CDBG("%s: Blocking on timed conditional wait", __func__);
3587            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3588            if (rc == ETIMEDOUT) {
3589                rc = -ENODEV;
3590                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3591                break;
3592            }
3593        }
3594        CDBG("%s: Unblocked", __func__);
3595        if (mWokenUpByDaemon) {
3596            mWokenUpByDaemon = false;
3597            if (mPendingLiveRequest < maxInFlightRequests)
3598                break;
3599        }
3600    }
3601    pthread_mutex_unlock(&mMutex);
3602
3603    return rc;
3604}
3605
3606/*===========================================================================
3607 * FUNCTION   : dump
3608 *
 * DESCRIPTION: Dump pending request/buffer/frame-drop state to the given
 *              file descriptor (triggered by dumpsys media.camera)
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump to
3613 *
3614 * RETURN     :
3615 *==========================================================================*/
3616void QCamera3HardwareInterface::dump(int fd)
3617{
3618    pthread_mutex_lock(&mMutex);
3619    dprintf(fd, "\n Camera HAL3 information Begin \n");
3620
3621    dprintf(fd, "\nNumber of pending requests: %zu \n",
3622        mPendingRequestsList.size());
3623    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3624    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3625    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3626    for(pendingRequestIterator i = mPendingRequestsList.begin();
3627            i != mPendingRequestsList.end(); i++) {
3628        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3629        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3630        i->input_buffer);
3631    }
3632    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3633                mPendingBuffersMap.num_buffers);
3634    dprintf(fd, "-------+------------------\n");
3635    dprintf(fd, " Frame | Stream type mask \n");
3636    dprintf(fd, "-------+------------------\n");
3637    for(List<PendingBufferInfo>::iterator i =
3638        mPendingBuffersMap.mPendingBufferList.begin();
3639        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3640        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3641        dprintf(fd, " %5d | %11d \n",
3642                i->frame_number, channel->getStreamTypeMask());
3643    }
3644    dprintf(fd, "-------+------------------\n");
3645
3646    dprintf(fd, "\nPending frame drop list: %zu\n",
3647        mPendingFrameDropList.size());
3648    dprintf(fd, "-------+-----------\n");
3649    dprintf(fd, " Frame | Stream ID \n");
3650    dprintf(fd, "-------+-----------\n");
3651    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3652        i != mPendingFrameDropList.end(); i++) {
3653        dprintf(fd, " %5d | %9d \n",
3654            i->frame_number, i->stream_ID);
3655    }
3656    dprintf(fd, "-------+-----------\n");
3657
3658    dprintf(fd, "\n Camera HAL3 information End \n");
3659
3660    /* use dumpsys media.camera as trigger to send update debug level event */
3661    mUpdateDebugLevel = true;
3662    pthread_mutex_unlock(&mMutex);
3663    return;
3664}
3665
3666/*===========================================================================
3667 * FUNCTION   : flush
3668 *
 * DESCRIPTION: Flush in-flight work: stop all channels, fail pending
 *              requests back to the framework, then restart the channels
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success, negative error code on failure
3675 *==========================================================================*/
3676int QCamera3HardwareInterface::flush()
3677{
3678    ATRACE_CALL();
3679    int32_t rc = NO_ERROR;
3680
3681    CDBG("%s: Unblocking Process Capture Request", __func__);
3682    pthread_mutex_lock(&mMutex);
3683    mFlush = true;
3684    pthread_mutex_unlock(&mMutex);
3685
3686    rc = stopAllChannels();
3687    if (rc < 0) {
3688        ALOGE("%s: stopAllChannels failed", __func__);
3689        return rc;
3690    }
3691    if (mChannelHandle) {
3692        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3693                mChannelHandle);
3694    }
3695
3696    // Reset bundle info
3697    rc = setBundleInfo();
3698    if (rc < 0) {
3699        ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3700        return rc;
3701    }
3702
3703    // Mutex Lock
3704    pthread_mutex_lock(&mMutex);
3705
3706    // Unblock process_capture_request
3707    mPendingLiveRequest = 0;
3708    pthread_cond_signal(&mRequestCond);
3709
3710    rc = notifyErrorForPendingRequests();
3711    if (rc < 0) {
3712        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3713        pthread_mutex_unlock(&mMutex);
3714        return rc;
3715    }
3716
3717    mFlush = false;
3718
3719    // Start the Streams/Channels
3720    rc = startAllChannels();
3721    if (rc < 0) {
3722        ALOGE("%s: startAllChannels failed", __func__);
3723        pthread_mutex_unlock(&mMutex);
3724        return rc;
3725    }
3726
3727    if (mChannelHandle) {
3728        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3729                    mChannelHandle);
3730        if (rc < 0) {
3731            ALOGE("%s: start_channel failed", __func__);
3732            pthread_mutex_unlock(&mMutex);
3733            return rc;
3734        }
3735    }
3736
3737    pthread_mutex_unlock(&mMutex);
3738
3739    return 0;
3740}
3741
3742/*===========================================================================
3743 * FUNCTION   : captureResultCb
3744 *
3745 * DESCRIPTION: Callback handler for all capture result
3746 *              (streams, as well as metadata)
3747 *
3748 * PARAMETERS :
3749 *   @metadata : metadata information
3750 *   @buffer   : actual gralloc buffer to be returned to frameworks.
3751 *               NULL if metadata.
3752 *
3753 * RETURN     : NONE
3754 *==========================================================================*/
3755void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3756                camera3_stream_buffer_t *buffer, uint32_t frame_number)
3757{
3758    if (metadata_buf) {
3759        if (mBatchSize) {
3760            handleBatchMetadata(metadata_buf,
3761                    true /* free_and_bufdone_meta_buf */);
3762        } else { /* mBatchSize = 0 */
3763            hdrPlusPerfLock(metadata_buf);
3764            pthread_mutex_lock(&mMutex);
3765            handleMetadataWithLock(metadata_buf,
3766                    true /* free_and_bufdone_meta_buf */);
3767            pthread_mutex_unlock(&mMutex);
3768        }
3769    } else {
3770        pthread_mutex_lock(&mMutex);
3771        handleBufferWithLock(buffer, frame_number);
3772        pthread_mutex_unlock(&mMutex);
3773    }
3774    return;
3775}
3776
3777/*===========================================================================
3778 * FUNCTION   : getReprocessibleOutputStreamId
3779 *
3780 * DESCRIPTION: Get source output stream id for the input reprocess stream
3781 *              based on size and format, which would be the largest
3782 *              output stream if an input stream exists.
3783 *
3784 * PARAMETERS :
3785 *   @id      : return the stream id if found
3786 *
3787 * RETURN     : int32_t type of status
3788 *              NO_ERROR  -- success
 *              non-zero failure code
3790 *==========================================================================*/
3791int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
3792{
3793    stream_info_t* stream = NULL;
3794
3795    /* check if any output or bidirectional stream with the same size and format
3796       and return that stream */
3797    if ((mInputStreamInfo.dim.width > 0) &&
3798            (mInputStreamInfo.dim.height > 0)) {
3799        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3800                it != mStreamInfo.end(); it++) {
3801
3802            camera3_stream_t *stream = (*it)->stream;
3803            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
3804                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
3805                    (stream->format == mInputStreamInfo.format)) {
3806                // Usage flag for an input stream and the source output stream
3807                // may be different.
3808                CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
3809                CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
3810                        __func__, stream->usage, mInputStreamInfo.usage);
3811
3812                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
3813                if (channel != NULL && channel->mStreams[0]) {
3814                    id = channel->mStreams[0]->getMyServerID();
3815                    return NO_ERROR;
3816                }
3817            }
3818        }
3819    } else {
3820        CDBG("%s: No input stream, so no reprocessible output stream", __func__);
3821    }
3822    return NAME_NOT_FOUND;
3823}
3824
3825/*===========================================================================
3826 * FUNCTION   : lookupFwkName
3827 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
3830 *
3831 * PARAMETERS  :
3832 *   @arr      : map between the two enums
3833 *   @len      : len of the map
3834 *   @hal_name : name of the hal_parm to map
3835 *
3836 * RETURN     : int type of status
3837 *              fwk_name  -- success
 *              non-zero failure code
3839 *==========================================================================*/
3840template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3841        size_t len, halType hal_name)
3842{
3843
3844    for (size_t i = 0; i < len; i++) {
3845        if (arr[i].hal_name == hal_name) {
3846            return arr[i].fwk_name;
3847        }
3848    }
3849
3850    /* Not able to find matching framework type is not necessarily
3851     * an error case. This happens when mm-camera supports more attributes
3852     * than the frameworks do */
3853    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3854    return NAME_NOT_FOUND;
3855}
3856
3857/*===========================================================================
3858 * FUNCTION   : lookupHalName
3859 *
 * DESCRIPTION: In case the enum is not the same in fwk and backend,
 *              make sure the parameter is correctly propagated
3862 *
3863 * PARAMETERS  :
3864 *   @arr      : map between the two enums
3865 *   @len      : len of the map
3866 *   @fwk_name : name of the hal_parm to map
3867 *
3868 * RETURN     : int32_t type of status
3869 *              hal_name  -- success
 *              non-zero failure code
3871 *==========================================================================*/
3872template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3873        size_t len, fwkType fwk_name)
3874{
3875    for (size_t i = 0; i < len; i++) {
3876        if (arr[i].fwk_name == fwk_name) {
3877            return arr[i].hal_name;
3878        }
3879    }
3880
3881    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3882    return NAME_NOT_FOUND;
3883}
3884
3885/*===========================================================================
3886 * FUNCTION   : lookupProp
3887 *
3888 * DESCRIPTION: lookup a value by its name
3889 *
3890 * PARAMETERS :
3891 *   @arr     : map between the two enums
3892 *   @len     : size of the map
3893 *   @name    : name to be looked up
3894 *
3895 * RETURN     : Value if found
3896 *              CAM_CDS_MODE_MAX if not found
3897 *==========================================================================*/
3898template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
3899        size_t len, const char *name)
3900{
3901    if (name) {
3902        for (size_t i = 0; i < len; i++) {
3903            if (!strcmp(arr[i].desc, name)) {
3904                return arr[i].val;
3905            }
3906        }
3907    }
3908    return CAM_CDS_MODE_MAX;
3909}
3910
3911/*===========================================================================
3912 *
3913 * DESCRIPTION:
3914 *
3915 * PARAMETERS :
3916 *   @metadata : metadata information from callback
3917 *   @timestamp: metadata buffer timestamp
3918 *   @request_id: request id
3919 *   @jpegMetadata: additional jpeg metadata
 *   @pprocDone: whether internal offline postprocessing is done
3921 *
3922 * RETURN     : camera_metadata_t*
3923 *              metadata in a format specified by fwk
3924 *==========================================================================*/
3925camera_metadata_t*
3926QCamera3HardwareInterface::translateFromHalMetadata(
3927                                 metadata_buffer_t *metadata,
3928                                 nsecs_t timestamp,
3929                                 int32_t request_id,
3930                                 const CameraMetadata& jpegMetadata,
3931                                 uint8_t pipeline_depth,
3932                                 uint8_t capture_intent,
3933                                 bool pprocDone)
3934{
3935    CameraMetadata camMetadata;
3936    camera_metadata_t *resultMetadata;
3937
3938    if (jpegMetadata.entryCount())
3939        camMetadata.append(jpegMetadata);
3940
3941    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
3942    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
3943    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
3944    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
3945
3946    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
3947        int64_t fwk_frame_number = *frame_number;
3948        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
3949    }
3950
3951    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
3952        int32_t fps_range[2];
3953        fps_range[0] = (int32_t)float_range->min_fps;
3954        fps_range[1] = (int32_t)float_range->max_fps;
3955        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3956                                      fps_range, 2);
3957        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
3958            __func__, fps_range[0], fps_range[1]);
3959    }
3960
3961    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
3962        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
3963    }
3964
3965    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
3966        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
3967                METADATA_MAP_SIZE(SCENE_MODES_MAP),
3968                *sceneMode);
3969        if (NAME_NOT_FOUND != val) {
3970            uint8_t fwkSceneMode = (uint8_t)val;
3971            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
3972            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
3973                    __func__, fwkSceneMode);
3974        }
3975    }
3976
3977    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
3978        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
3979        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
3980    }
3981
3982    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
3983        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
3984        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
3985    }
3986
3987    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
3988        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
3989        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
3990    }
3991
3992    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
3993            CAM_INTF_META_EDGE_MODE, metadata) {
3994        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
3995        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
3996    }
3997
3998    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
3999        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4000        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4001    }
4002
4003    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4004        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4005    }
4006
4007    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4008        if (0 <= *flashState) {
4009            uint8_t fwk_flashState = (uint8_t) *flashState;
4010            if (!gCamCapability[mCameraId]->flash_available) {
4011                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4012            }
4013            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4014        }
4015    }
4016
4017    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4018        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4019        if (NAME_NOT_FOUND != val) {
4020            uint8_t fwk_flashMode = (uint8_t)val;
4021            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4022        }
4023    }
4024
4025    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4026        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4027        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4028    }
4029
4030    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4031        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4032    }
4033
4034    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4035        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4036    }
4037
4038    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4039        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4040    }
4041
4042    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4043        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4044        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4045    }
4046
4047    /*EIS is currently not hooked up to the app, so set the mode to OFF*/
4048    uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4049    camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
4050
4051    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4052        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4053        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4054    }
4055
4056    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4057        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4058    }
4059
4060    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4061        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4062
4063        CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4064          blackLevelSourcePattern->cam_black_level[0],
4065          blackLevelSourcePattern->cam_black_level[1],
4066          blackLevelSourcePattern->cam_black_level[2],
4067          blackLevelSourcePattern->cam_black_level[3]);
4068    }
4069
4070    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4071        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4072        float fwk_blackLevelInd[4];
4073
4074        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4075        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4076        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4077        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4078
4079        CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4080          blackLevelAppliedPattern->cam_black_level[0],
4081          blackLevelAppliedPattern->cam_black_level[1],
4082          blackLevelAppliedPattern->cam_black_level[2],
4083          blackLevelAppliedPattern->cam_black_level[3]);
4084        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4085        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4086    }
4087
4088
4089    if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4090        gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4091        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4092        for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4093            opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4094        }
4095        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4096                opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4097    }
4098
4099    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4100            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4101        int32_t scalerCropRegion[4];
4102        scalerCropRegion[0] = hScalerCropRegion->left;
4103        scalerCropRegion[1] = hScalerCropRegion->top;
4104        scalerCropRegion[2] = hScalerCropRegion->width;
4105        scalerCropRegion[3] = hScalerCropRegion->height;
4106
4107        // Adjust crop region from sensor output coordinate system to active
4108        // array coordinate system.
4109        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4110                scalerCropRegion[2], scalerCropRegion[3]);
4111
4112        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4113    }
4114
4115    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4116        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4117        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4118    }
4119
4120    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4121            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4122        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4123        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4124    }
4125
4126    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4127            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4128        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4129        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4130                sensorRollingShutterSkew, 1);
4131    }
4132
4133    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4134        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4135        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4136
4137        //calculate the noise profile based on sensitivity
4138        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4139        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4140        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4141        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4142            noise_profile[i]   = noise_profile_S;
4143            noise_profile[i+1] = noise_profile_O;
4144        }
4145        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4146                noise_profile_S, noise_profile_O);
4147        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4148                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4149    }
4150
4151    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4152        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4153        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4154    }
4155
4156    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4157        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4158                *faceDetectMode);
4159        if (NAME_NOT_FOUND != val) {
4160            uint8_t fwk_faceDetectMode = (uint8_t)val;
4161            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4162
4163            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4164                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4165                        CAM_INTF_META_FACE_DETECTION, metadata) {
4166                    uint8_t numFaces = MIN(
4167                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4168                    int32_t faceIds[MAX_ROI];
4169                    uint8_t faceScores[MAX_ROI];
4170                    int32_t faceRectangles[MAX_ROI * 4];
4171                    int32_t faceLandmarks[MAX_ROI * 6];
4172                    size_t j = 0, k = 0;
4173
4174                    for (size_t i = 0; i < numFaces; i++) {
4175                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4176                        // Adjust crop region from sensor output coordinate system to active
4177                        // array coordinate system.
4178                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4179                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4180                                rect.width, rect.height);
4181
4182                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4183                                faceRectangles+j, -1);
4184
4185                        // Map the co-ordinate sensor output coordinate system to active
4186                        // array coordinate system.
4187                        cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4188                        mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4189                                face.left_eye_center.y);
4190                        mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4191                                face.right_eye_center.y);
4192                        mCropRegionMapper.toActiveArray(face.mouth_center.x,
4193                                face.mouth_center.y);
4194
4195                        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4196                        j+= 4;
4197                        k+= 6;
4198                    }
4199                    if (numFaces <= 0) {
4200                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4201                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4202                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4203                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4204                    }
4205
4206                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4207                            numFaces);
4208                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4209                            faceRectangles, numFaces * 4U);
4210                    if (fwk_faceDetectMode ==
4211                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4212                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4213                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4214                                faceLandmarks, numFaces * 6U);
4215                   }
4216                }
4217            }
4218        }
4219    }
4220
4221    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4222        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4223        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4224    }
4225
4226    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4227            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4228        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4229        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4230    }
4231
4232    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4233            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4234        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4235                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4236    }
4237
4238    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4239            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4240        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4241                CAM_MAX_SHADING_MAP_HEIGHT);
4242        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4243                CAM_MAX_SHADING_MAP_WIDTH);
4244        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4245                lensShadingMap->lens_shading, 4U * map_width * map_height);
4246    }
4247
4248    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4249        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4250        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4251    }
4252
4253    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4254        //Populate CAM_INTF_META_TONEMAP_CURVES
4255        /* ch0 = G, ch 1 = B, ch 2 = R*/
4256        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4257            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4258                    __func__, tonemap->tonemap_points_cnt,
4259                    CAM_MAX_TONEMAP_CURVE_SIZE);
4260            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4261        }
4262
4263        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4264                        &tonemap->curves[0].tonemap_points[0][0],
4265                        tonemap->tonemap_points_cnt * 2);
4266
4267        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4268                        &tonemap->curves[1].tonemap_points[0][0],
4269                        tonemap->tonemap_points_cnt * 2);
4270
4271        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4272                        &tonemap->curves[2].tonemap_points[0][0],
4273                        tonemap->tonemap_points_cnt * 2);
4274    }
4275
4276    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4277            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4278        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4279                CC_GAINS_COUNT);
4280    }
4281
4282    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4283            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4284        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4285                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4286                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4287    }
4288
4289    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4290            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4291        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4292            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4293                    __func__, toneCurve->tonemap_points_cnt,
4294                    CAM_MAX_TONEMAP_CURVE_SIZE);
4295            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4296        }
4297        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4298                (float*)toneCurve->curve.tonemap_points,
4299                toneCurve->tonemap_points_cnt * 2);
4300    }
4301
4302    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4303            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4304        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4305                predColorCorrectionGains->gains, 4);
4306    }
4307
4308    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4309            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4310        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4311                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4312                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4313    }
4314
4315    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4316        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4317    }
4318
4319    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4320        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4321        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4322    }
4323
4324    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4325        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4326        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4327    }
4328
4329    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4330        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4331                *effectMode);
4332        if (NAME_NOT_FOUND != val) {
4333            uint8_t fwk_effectMode = (uint8_t)val;
4334            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4335        }
4336    }
4337
4338    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4339            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4340        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4341                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4342        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4343            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4344        }
4345        int32_t fwk_testPatternData[4];
4346        fwk_testPatternData[0] = testPatternData->r;
4347        fwk_testPatternData[3] = testPatternData->b;
4348        switch (gCamCapability[mCameraId]->color_arrangement) {
4349        case CAM_FILTER_ARRANGEMENT_RGGB:
4350        case CAM_FILTER_ARRANGEMENT_GRBG:
4351            fwk_testPatternData[1] = testPatternData->gr;
4352            fwk_testPatternData[2] = testPatternData->gb;
4353            break;
4354        case CAM_FILTER_ARRANGEMENT_GBRG:
4355        case CAM_FILTER_ARRANGEMENT_BGGR:
4356            fwk_testPatternData[2] = testPatternData->gr;
4357            fwk_testPatternData[1] = testPatternData->gb;
4358            break;
4359        default:
4360            ALOGE("%s: color arrangement %d is not supported", __func__,
4361                gCamCapability[mCameraId]->color_arrangement);
4362            break;
4363        }
4364        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4365    }
4366
4367    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4368        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4369    }
4370
4371    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4372        String8 str((const char *)gps_methods);
4373        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4374    }
4375
4376    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4377        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4378    }
4379
4380    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4381        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4382    }
4383
4384    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4385        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4386        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4387    }
4388
4389    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4390        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4391        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4392    }
4393
4394    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4395        int32_t fwk_thumb_size[2];
4396        fwk_thumb_size[0] = thumb_size->width;
4397        fwk_thumb_size[1] = thumb_size->height;
4398        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4399    }
4400
4401    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4402        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4403                privateData,
4404                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4405    }
4406
4407    if (metadata->is_tuning_params_valid) {
4408        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4409        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4410        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4411
4412
4413        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4414                sizeof(uint32_t));
4415        data += sizeof(uint32_t);
4416
4417        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4418                sizeof(uint32_t));
4419        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4420        data += sizeof(uint32_t);
4421
4422        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4423                sizeof(uint32_t));
4424        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4425        data += sizeof(uint32_t);
4426
4427        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4428                sizeof(uint32_t));
4429        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4430        data += sizeof(uint32_t);
4431
4432        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4433                sizeof(uint32_t));
4434        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4435        data += sizeof(uint32_t);
4436
4437        metadata->tuning_params.tuning_mod3_data_size = 0;
4438        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4439                sizeof(uint32_t));
4440        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4441        data += sizeof(uint32_t);
4442
4443        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4444                TUNING_SENSOR_DATA_MAX);
4445        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4446                count);
4447        data += count;
4448
4449        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4450                TUNING_VFE_DATA_MAX);
4451        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4452                count);
4453        data += count;
4454
4455        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4456                TUNING_CPP_DATA_MAX);
4457        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4458                count);
4459        data += count;
4460
4461        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4462                TUNING_CAC_DATA_MAX);
4463        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4464                count);
4465        data += count;
4466
4467        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4468                (int32_t *)(void *)tuning_meta_data_blob,
4469                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4470    }
4471
4472    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4473            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4474        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4475                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4476                NEUTRAL_COL_POINTS);
4477    }
4478
4479    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4480        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4481        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4482    }
4483
4484    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4485        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4486        // Adjust crop region from sensor output coordinate system to active
4487        // array coordinate system.
4488        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4489                hAeRegions->rect.width, hAeRegions->rect.height);
4490
4491        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4492        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4493                REGIONS_TUPLE_COUNT);
4494        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4495                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4496                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4497                hAeRegions->rect.height);
4498    }
4499
4500    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4501        uint8_t fwk_afState = (uint8_t) *afState;
4502        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4503        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4504    }
4505
4506    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4507        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4508    }
4509
4510    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4511        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4512    }
4513
4514    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4515        uint8_t fwk_lensState = *lensState;
4516        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4517    }
4518
4519    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4520        /*af regions*/
4521        int32_t afRegions[REGIONS_TUPLE_COUNT];
4522        // Adjust crop region from sensor output coordinate system to active
4523        // array coordinate system.
4524        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4525                hAfRegions->rect.width, hAfRegions->rect.height);
4526
4527        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4528        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4529                REGIONS_TUPLE_COUNT);
4530        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4531                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4532                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4533                hAfRegions->rect.height);
4534    }
4535
4536    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4537        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4538                *hal_ab_mode);
4539        if (NAME_NOT_FOUND != val) {
4540            uint8_t fwk_ab_mode = (uint8_t)val;
4541            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4542        }
4543    }
4544
4545    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4546        int val = lookupFwkName(SCENE_MODES_MAP,
4547                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4548        if (NAME_NOT_FOUND != val) {
4549            uint8_t fwkBestshotMode = (uint8_t)val;
4550            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4551            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4552        } else {
4553            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4554        }
4555    }
4556
4557    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4558         uint8_t fwk_mode = (uint8_t) *mode;
4559         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4560    }
4561
4562    /* Constant metadata values to be update*/
4563    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4564    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4565
4566    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4567    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4568
4569    int32_t hotPixelMap[2];
4570    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4571
4572    // CDS
4573    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4574        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4575    }
4576
4577    // TNR
4578    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4579        uint8_t tnr_enable       = tnr->denoise_enable;
4580        int32_t tnr_process_type = (int32_t)tnr->process_plates;
4581
4582        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4583        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4584    }
4585
4586    // Reprocess crop data
4587    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4588        uint8_t cnt = crop_data->num_of_streams;
4589        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4590            // mm-qcamera-daemon only posts crop_data for streams
4591            // not linked to pproc. So no valid crop metadata is not
4592            // necessarily an error case.
4593            CDBG("%s: No valid crop metadata entries", __func__);
4594        } else {
4595            uint32_t reproc_stream_id;
4596            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4597                CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4598            } else {
4599                int rc = NO_ERROR;
4600                Vector<int32_t> roi_map;
4601                int32_t *crop = new int32_t[cnt*4];
4602                if (NULL == crop) {
4603                   rc = NO_MEMORY;
4604                }
4605                if (NO_ERROR == rc) {
4606                    int32_t streams_found = 0;
4607                    for (size_t i = 0; i < cnt; i++) {
4608                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4609                            if (pprocDone) {
4610                                // HAL already does internal reprocessing,
4611                                // either via reprocessing before JPEG encoding,
4612                                // or offline postprocessing for pproc bypass case.
4613                                crop[0] = 0;
4614                                crop[1] = 0;
4615                                crop[2] = mInputStreamInfo.dim.width;
4616                                crop[3] = mInputStreamInfo.dim.height;
4617                            } else {
4618                                crop[0] = crop_data->crop_info[i].crop.left;
4619                                crop[1] = crop_data->crop_info[i].crop.top;
4620                                crop[2] = crop_data->crop_info[i].crop.width;
4621                                crop[3] = crop_data->crop_info[i].crop.height;
4622                            }
4623                            roi_map.add(crop_data->crop_info[i].roi_map.left);
4624                            roi_map.add(crop_data->crop_info[i].roi_map.top);
4625                            roi_map.add(crop_data->crop_info[i].roi_map.width);
4626                            roi_map.add(crop_data->crop_info[i].roi_map.height);
4627                            streams_found++;
4628                            CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4629                                    __func__,
4630                                    crop[0], crop[1], crop[2], crop[3]);
4631                            CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4632                                    __func__,
4633                                    crop_data->crop_info[i].roi_map.left,
4634                                    crop_data->crop_info[i].roi_map.top,
4635                                    crop_data->crop_info[i].roi_map.width,
4636                                    crop_data->crop_info[i].roi_map.height);
4637                            break;
4638
4639                       }
4640                    }
4641                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4642                            &streams_found, 1);
4643                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
4644                            crop, (size_t)(streams_found * 4));
4645                    if (roi_map.array()) {
4646                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4647                                roi_map.array(), roi_map.size());
4648                    }
4649               }
4650               if (crop) {
4651                   delete [] crop;
4652               }
4653            }
4654        }
4655    }
4656
4657    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4658        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4659                *cacMode);
4660        if (NAME_NOT_FOUND != val) {
4661            uint8_t fwkCacMode = (uint8_t)val;
4662            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4663        } else {
4664            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4665        }
4666    }
4667
4668    // Post blob of cam_cds_data through vendor tag.
4669    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4670        uint8_t cnt = cdsInfo->num_of_streams;
4671        cam_cds_data_t cdsDataOverride;
4672        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4673        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4674        cdsDataOverride.num_of_streams = 1;
4675        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4676            uint32_t reproc_stream_id;
4677            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4678                CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4679            } else {
4680                for (size_t i = 0; i < cnt; i++) {
4681                    if (cdsInfo->cds_info[i].stream_id ==
4682                            reproc_stream_id) {
4683                        cdsDataOverride.cds_info[0].cds_enable =
4684                                cdsInfo->cds_info[i].cds_enable;
4685                        break;
4686                    }
4687                }
4688            }
4689        } else {
4690            CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
4691        }
4692        camMetadata.update(QCAMERA3_CDS_INFO,
4693                (uint8_t *)&cdsDataOverride,
4694                sizeof(cam_cds_data_t));
4695    }
4696
4697    // Ldaf calibration data
4698    if (!mLdafCalibExist) {
4699        IF_META_AVAILABLE(uint32_t, ldafCalib,
4700                CAM_INTF_META_LDAF_EXIF, metadata) {
4701            mLdafCalibExist = true;
4702            mLdafCalib[0] = ldafCalib[0];
4703            mLdafCalib[1] = ldafCalib[1];
4704            CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
4705                    ldafCalib[0], ldafCalib[1]);
4706        }
4707    }
4708
4709    resultMetadata = camMetadata.release();
4710    return resultMetadata;
4711}
4712
4713/*===========================================================================
4714 * FUNCTION   : saveExifParams
4715 *
4716 * DESCRIPTION:
4717 *
4718 * PARAMETERS :
4719 *   @metadata : metadata information from callback
4720 *
4721 * RETURN     : none
4722 *
4723 *==========================================================================*/
4724void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
4725{
4726    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
4727            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
4728        mExifParams.ae_debug_params = *ae_exif_debug_params;
4729        mExifParams.ae_debug_params_valid = TRUE;
4730    }
4731    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
4732            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
4733        mExifParams.awb_debug_params = *awb_exif_debug_params;
4734        mExifParams.awb_debug_params_valid = TRUE;
4735    }
4736    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
4737            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
4738        mExifParams.af_debug_params = *af_exif_debug_params;
4739        mExifParams.af_debug_params_valid = TRUE;
4740    }
4741    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
4742            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
4743        mExifParams.asd_debug_params = *asd_exif_debug_params;
4744        mExifParams.asd_debug_params_valid = TRUE;
4745    }
4746    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
4747            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
4748        mExifParams.stats_debug_params = *stats_exif_debug_params;
4749        mExifParams.stats_debug_params_valid = TRUE;
4750    }
4751}
4752
4753/*===========================================================================
4754 * FUNCTION   : get3AExifParams
4755 *
4756 * DESCRIPTION:
4757 *
4758 * PARAMETERS : none
4759 *
4760 *
4761 * RETURN     : mm_jpeg_exif_params_t
4762 *
4763 *==========================================================================*/
4764mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
4765{
4766    return mExifParams;
4767}
4768
4769/*===========================================================================
4770 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4771 *
4772 * DESCRIPTION:
4773 *
4774 * PARAMETERS :
4775 *   @metadata : metadata information from callback
4776 *
4777 * RETURN     : camera_metadata_t*
4778 *              metadata in a format specified by fwk
4779 *==========================================================================*/
4780camera_metadata_t*
4781QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
4782                                (metadata_buffer_t *metadata)
4783{
4784    CameraMetadata camMetadata;
4785    camera_metadata_t *resultMetadata;
4786
4787
4788    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
4789        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
4790        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
4791        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
4792    }
4793
4794    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
4795        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
4796                &aecTrigger->trigger, 1);
4797        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
4798                &aecTrigger->trigger_id, 1);
4799        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
4800                __func__, aecTrigger->trigger);
4801        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
4802                aecTrigger->trigger_id);
4803    }
4804
4805    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
4806        uint8_t fwk_ae_state = (uint8_t) *ae_state;
4807        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
4808        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
4809    }
4810
4811    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4812        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4813        if (NAME_NOT_FOUND != val) {
4814            uint8_t fwkAfMode = (uint8_t)val;
4815            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4816            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
4817        } else {
4818            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
4819                    val);
4820        }
4821    }
4822
4823    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
4824        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
4825                &af_trigger->trigger, 1);
4826        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
4827                __func__, af_trigger->trigger);
4828        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
4829        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
4830                af_trigger->trigger_id);
4831    }
4832
4833    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
4834        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4835                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
4836        if (NAME_NOT_FOUND != val) {
4837            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
4838            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
4839            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
4840        } else {
4841            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
4842        }
4843    }
4844
4845    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4846    uint32_t aeMode = CAM_AE_MODE_MAX;
4847    int32_t flashMode = CAM_FLASH_MODE_MAX;
4848    int32_t redeye = -1;
4849    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
4850        aeMode = *pAeMode;
4851    }
4852    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
4853        flashMode = *pFlashMode;
4854    }
4855    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
4856        redeye = *pRedeye;
4857    }
4858
4859    if (1 == redeye) {
4860        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
4861        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4862    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
4863        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
4864                flashMode);
4865        if (NAME_NOT_FOUND != val) {
4866            fwk_aeMode = (uint8_t)val;
4867            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4868        } else {
4869            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
4870        }
4871    } else if (aeMode == CAM_AE_MODE_ON) {
4872        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
4873        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4874    } else if (aeMode == CAM_AE_MODE_OFF) {
4875        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4876        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4877    } else {
4878        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
4879              "flashMode:%d, aeMode:%u!!!",
4880                __func__, redeye, flashMode, aeMode);
4881    }
4882
4883    resultMetadata = camMetadata.release();
4884    return resultMetadata;
4885}
4886
4887/*===========================================================================
4888 * FUNCTION   : dumpMetadataToFile
4889 *
4890 * DESCRIPTION: Dumps tuning metadata to file system
4891 *
4892 * PARAMETERS :
4893 *   @meta           : tuning metadata
4894 *   @dumpFrameCount : current dump frame count
4895 *   @enabled        : Enable mask
4896 *
4897 *==========================================================================*/
void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
                                                   uint32_t &dumpFrameCount,
                                                   bool enabled,
                                                   const char *type,
                                                   uint32_t frameNumber)
{
    uint32_t frm_num = 0;  // NOTE(review): never used in this function

    // Sanity checks: reject any section whose reported size exceeds the
    // fixed-size slot reserved for it inside meta.data, since the write()
    // calls below would otherwise read past the end of the buffer.
    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
              __func__,
              meta.tuning_sensor_data_size,
              TUNING_SENSOR_DATA_MAX);
        return;
    }

    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
              __func__,
              meta.tuning_vfe_data_size,
              TUNING_VFE_DATA_MAX);
        return;
    }

    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
              __func__,
              meta.tuning_cpp_data_size,
              TUNING_CPP_DATA_MAX);
        return;
    }

    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
              __func__,
              meta.tuning_cac_data_size,
              TUNING_CAC_DATA_MAX);
        return;
    }
    // End of sanity checks.

    if(enabled){
        // Build the dump file path:
        //   QCAMERA_DUMP_FRM_LOCATION<timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin
        char timeBuf[FILENAME_MAX];
        char buf[FILENAME_MAX];
        memset(buf, 0, sizeof(buf));
        memset(timeBuf, 0, sizeof(timeBuf));
        time_t current_time;
        struct tm * timeinfo;
        time (&current_time);
        timeinfo = localtime (&current_time);
        if (timeinfo != NULL) {
            strftime (timeBuf, sizeof(timeBuf),
                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
        }
        String8 filePath(timeBuf);
        snprintf(buf,
                sizeof(buf),
                "%dm_%s_%d.bin",
                dumpFrameCount,
                type,
                frameNumber);
        filePath.append(buf);
        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
        if (file_fd >= 0) {
            // NOTE(review): written_len is accumulated but never checked, so
            // short/failed writes are silently ignored (best-effort dump).
            ssize_t written_len = 0;
            // Header: version word followed by the five section size words,
            // each written as a raw uint32_t in this exact order. The parser
            // of the .bin file relies on this layout.
            meta.tuning_data_version = TUNING_DATA_VERSION;
            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            // mod3 section is not dumped; force its size word to zero.
            meta.tuning_mod3_data_size = 0;
            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            // Payload: each section lives at a fixed offset inside meta.data;
            // only the first <size> bytes of each slot are written.
            size_t total_size = meta.tuning_sensor_data_size;
            data = (void *)((uint8_t *)&meta.data);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_vfe_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cpp_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cac_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            close(file_fd);
        }else {
            ALOGE("%s: fail to open file for metadata dumping", __func__);
        }
    }
}
5001
5002/*===========================================================================
5003 * FUNCTION   : cleanAndSortStreamInfo
5004 *
5005 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5006 *              and sort them such that raw stream is at the end of the list
5007 *              This is a workaround for camera daemon constraint.
5008 *
5009 * PARAMETERS : None
5010 *
5011 *==========================================================================*/
5012void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5013{
5014    List<stream_info_t *> newStreamInfo;
5015
5016    /*clean up invalid streams*/
5017    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5018            it != mStreamInfo.end();) {
5019        if(((*it)->status) == INVALID){
5020            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5021            delete channel;
5022            free(*it);
5023            it = mStreamInfo.erase(it);
5024        } else {
5025            it++;
5026        }
5027    }
5028
5029    // Move preview/video/callback/snapshot streams into newList
5030    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5031            it != mStreamInfo.end();) {
5032        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5033                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5034                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5035            newStreamInfo.push_back(*it);
5036            it = mStreamInfo.erase(it);
5037        } else
5038            it++;
5039    }
5040    // Move raw streams into newList
5041    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5042            it != mStreamInfo.end();) {
5043        newStreamInfo.push_back(*it);
5044        it = mStreamInfo.erase(it);
5045    }
5046
5047    mStreamInfo = newStreamInfo;
5048}
5049
5050/*===========================================================================
5051 * FUNCTION   : extractJpegMetadata
5052 *
5053 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5054 *              JPEG metadata is cached in HAL, and return as part of capture
5055 *              result when metadata is returned from camera daemon.
5056 *
5057 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5058 *              @request:      capture request
5059 *
5060 *==========================================================================*/
5061void QCamera3HardwareInterface::extractJpegMetadata(
5062        CameraMetadata& jpegMetadata,
5063        const camera3_capture_request_t *request)
5064{
5065    CameraMetadata frame_settings;
5066    frame_settings = request->settings;
5067
5068    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5069        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5070                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5071                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5072
5073    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5074        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5075                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5076                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5077
5078    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5079        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5080                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5081                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5082
5083    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5084        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5085                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5086                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5087
5088    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5089        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5090                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5091                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5092
5093    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5094        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5095                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5096                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5097
5098    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5099        int32_t thumbnail_size[2];
5100        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5101        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5102        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5103            int32_t orientation =
5104                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5105            if ((orientation == 90) || (orientation == 270)) {
5106               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5107               int32_t temp;
5108               temp = thumbnail_size[0];
5109               thumbnail_size[0] = thumbnail_size[1];
5110               thumbnail_size[1] = temp;
5111            }
5112         }
5113         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5114                thumbnail_size,
5115                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5116    }
5117
5118}
5119
5120/*===========================================================================
5121 * FUNCTION   : convertToRegions
5122 *
5123 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5124 *
5125 * PARAMETERS :
5126 *   @rect   : cam_rect_t struct to convert
5127 *   @region : int32_t destination array
5128 *   @weight : if we are converting from cam_area_t, weight is valid
5129 *             else weight = -1
5130 *
5131 *==========================================================================*/
5132void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5133        int32_t *region, int weight)
5134{
5135    region[0] = rect.left;
5136    region[1] = rect.top;
5137    region[2] = rect.left + rect.width;
5138    region[3] = rect.top + rect.height;
5139    if (weight > -1) {
5140        region[4] = weight;
5141    }
5142}
5143
5144/*===========================================================================
5145 * FUNCTION   : convertFromRegions
5146 *
 * DESCRIPTION: helper method to convert a metadata region array
 *              (x_min, y_min, x_max, y_max, weight) into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination struct
 *   @settings : capture request metadata containing the region entry
 *   @tag      : metadata tag of the region entry to convert
5154 *
5155 *==========================================================================*/
5156void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5157        const camera_metadata_t *settings, uint32_t tag)
5158{
5159    CameraMetadata frame_settings;
5160    frame_settings = settings;
5161    int32_t x_min = frame_settings.find(tag).data.i32[0];
5162    int32_t y_min = frame_settings.find(tag).data.i32[1];
5163    int32_t x_max = frame_settings.find(tag).data.i32[2];
5164    int32_t y_max = frame_settings.find(tag).data.i32[3];
5165    roi.weight = frame_settings.find(tag).data.i32[4];
5166    roi.rect.left = x_min;
5167    roi.rect.top = y_min;
5168    roi.rect.width = x_max - x_min;
5169    roi.rect.height = y_max - y_min;
5170}
5171
5172/*===========================================================================
5173 * FUNCTION   : resetIfNeededROI
5174 *
5175 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5176 *              crop region
5177 *
5178 * PARAMETERS :
5179 *   @roi       : cam_area_t struct to resize
5180 *   @scalerCropRegion : cam_crop_region_t region to compare against
5181 *
5182 *
5183 *==========================================================================*/
5184bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5185                                                 const cam_crop_region_t* scalerCropRegion)
5186{
5187    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5188    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5189    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5190    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5191
5192    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5193     * without having this check the calculations below to validate if the roi
5194     * is inside scalar crop region will fail resulting in the roi not being
5195     * reset causing algorithm to continue to use stale roi window
5196     */
5197    if (roi->weight == 0) {
5198        return true;
5199    }
5200
5201    if ((roi_x_max < scalerCropRegion->left) ||
5202        // right edge of roi window is left of scalar crop's left edge
5203        (roi_y_max < scalerCropRegion->top)  ||
5204        // bottom edge of roi window is above scalar crop's top edge
5205        (roi->rect.left > crop_x_max) ||
5206        // left edge of roi window is beyond(right) of scalar crop's right edge
5207        (roi->rect.top > crop_y_max)){
5208        // top edge of roi windo is above scalar crop's top edge
5209        return false;
5210    }
5211    if (roi->rect.left < scalerCropRegion->left) {
5212        roi->rect.left = scalerCropRegion->left;
5213    }
5214    if (roi->rect.top < scalerCropRegion->top) {
5215        roi->rect.top = scalerCropRegion->top;
5216    }
5217    if (roi_x_max > crop_x_max) {
5218        roi_x_max = crop_x_max;
5219    }
5220    if (roi_y_max > crop_y_max) {
5221        roi_y_max = crop_y_max;
5222    }
5223    roi->rect.width = roi_x_max - roi->rect.left;
5224    roi->rect.height = roi_y_max - roi->rect.top;
5225    return true;
5226}
5227
5228/*===========================================================================
5229 * FUNCTION   : convertLandmarks
5230 *
5231 * DESCRIPTION: helper method to extract the landmarks from face detection info
5232 *
5233 * PARAMETERS :
 *   @face      : face detection info providing eye and mouth coordinates
 *   @landmarks : int32_t destination array (6 entries: left eye x/y,
 *                right eye x/y, mouth x/y)
5236 *
5237 *
5238 *==========================================================================*/
5239void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5240{
5241    landmarks[0] = (int32_t)face.left_eye_center.x;
5242    landmarks[1] = (int32_t)face.left_eye_center.y;
5243    landmarks[2] = (int32_t)face.right_eye_center.x;
5244    landmarks[3] = (int32_t)face.right_eye_center.y;
5245    landmarks[4] = (int32_t)face.mouth_center.x;
5246    landmarks[5] = (int32_t)face.mouth_center.y;
5247}
5248
5249#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5250/*===========================================================================
5251 * FUNCTION   : initCapabilities
5252 *
5253 * DESCRIPTION: initialize camera capabilities in static data struct
5254 *
5255 * PARAMETERS :
5256 *   @cameraId  : camera Id
5257 *
5258 * RETURN     : int32_t type of status
5259 *              NO_ERROR  -- success
5260 *              none-zero failure code
5261 *==========================================================================*/
5262int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5263{
5264    int rc = 0;
5265    mm_camera_vtbl_t *cameraHandle = NULL;
5266    QCamera3HeapMemory *capabilityHeap = NULL;
5267
5268    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5269    if (rc || !cameraHandle) {
5270        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5271        goto open_failed;
5272    }
5273
5274    capabilityHeap = new QCamera3HeapMemory(1);
5275    if (capabilityHeap == NULL) {
5276        ALOGE("%s: creation of capabilityHeap failed", __func__);
5277        goto heap_creation_failed;
5278    }
5279    /* Allocate memory for capability buffer */
5280    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5281    if(rc != OK) {
5282        ALOGE("%s: No memory for cappability", __func__);
5283        goto allocate_failed;
5284    }
5285
5286    /* Map memory for capability buffer */
5287    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5288    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5289                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5290                                capabilityHeap->getFd(0),
5291                                sizeof(cam_capability_t));
5292    if(rc < 0) {
5293        ALOGE("%s: failed to map capability buffer", __func__);
5294        goto map_failed;
5295    }
5296
5297    /* Query Capability */
5298    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5299    if(rc < 0) {
5300        ALOGE("%s: failed to query capability",__func__);
5301        goto query_failed;
5302    }
5303    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5304    if (!gCamCapability[cameraId]) {
5305        ALOGE("%s: out of memory", __func__);
5306        goto query_failed;
5307    }
5308    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5309                                        sizeof(cam_capability_t));
5310    rc = 0;
5311
5312query_failed:
5313    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5314                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5315map_failed:
5316    capabilityHeap->deallocate();
5317allocate_failed:
5318    delete capabilityHeap;
5319heap_creation_failed:
5320    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5321    cameraHandle = NULL;
5322open_failed:
5323    return rc;
5324}
5325
5326/*==========================================================================
5327 * FUNCTION   : get3Aversion
5328 *
5329 * DESCRIPTION: get the Q3A S/W version
5330 *
5331 * PARAMETERS :
5332 *  @sw_version: Reference of Q3A structure which will hold version info upon
5333 *               return
5334 *
5335 * RETURN     : None
5336 *
5337 *==========================================================================*/
5338void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5339{
5340    if(gCamCapability[mCameraId])
5341        sw_version = gCamCapability[mCameraId]->q3a_version;
5342    else
5343        ALOGE("%s:Capability structure NULL!", __func__);
5344}
5345
5346
5347/*===========================================================================
5348 * FUNCTION   : initParameters
5349 *
5350 * DESCRIPTION: initialize camera parameters
5351 *
5352 * PARAMETERS :
5353 *
5354 * RETURN     : int32_t type of status
5355 *              NO_ERROR  -- success
5356 *              none-zero failure code
5357 *==========================================================================*/
5358int QCamera3HardwareInterface::initParameters()
5359{
5360    int rc = 0;
5361
5362    //Allocate Set Param Buffer
5363    mParamHeap = new QCamera3HeapMemory(1);
5364    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5365    if(rc != OK) {
5366        rc = NO_MEMORY;
5367        ALOGE("Failed to allocate SETPARM Heap memory");
5368        delete mParamHeap;
5369        mParamHeap = NULL;
5370        return rc;
5371    }
5372
5373    //Map memory for parameters buffer
5374    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5375            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5376            mParamHeap->getFd(0),
5377            sizeof(metadata_buffer_t));
5378    if(rc < 0) {
5379        ALOGE("%s:failed to map SETPARM buffer",__func__);
5380        rc = FAILED_TRANSACTION;
5381        mParamHeap->deallocate();
5382        delete mParamHeap;
5383        mParamHeap = NULL;
5384        return rc;
5385    }
5386
5387    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5388
5389    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5390    return rc;
5391}
5392
5393/*===========================================================================
5394 * FUNCTION   : deinitParameters
5395 *
5396 * DESCRIPTION: de-initialize camera parameters
5397 *
5398 * PARAMETERS :
5399 *
5400 * RETURN     : NONE
5401 *==========================================================================*/
5402void QCamera3HardwareInterface::deinitParameters()
5403{
5404    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
5405            CAM_MAPPING_BUF_TYPE_PARM_BUF);
5406
5407    mParamHeap->deallocate();
5408    delete mParamHeap;
5409    mParamHeap = NULL;
5410
5411    mParameters = NULL;
5412
5413    free(mPrevParameters);
5414    mPrevParameters = NULL;
5415}
5416
5417/*===========================================================================
5418 * FUNCTION   : calcMaxJpegSize
5419 *
5420 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5421 *
5422 * PARAMETERS :
5423 *
5424 * RETURN     : max_jpeg_size
5425 *==========================================================================*/
5426size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5427{
5428    size_t max_jpeg_size = 0;
5429    size_t temp_width, temp_height;
5430    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5431            MAX_SIZES_CNT);
5432    for (size_t i = 0; i < count; i++) {
5433        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5434        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5435        if (temp_width * temp_height > max_jpeg_size ) {
5436            max_jpeg_size = temp_width * temp_height;
5437        }
5438    }
5439    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5440    return max_jpeg_size;
5441}
5442
5443/*===========================================================================
5444 * FUNCTION   : getMaxRawSize
5445 *
5446 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5447 *
5448 * PARAMETERS :
5449 *
5450 * RETURN     : Largest supported Raw Dimension
5451 *==========================================================================*/
5452cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5453{
5454    int max_width = 0;
5455    cam_dimension_t maxRawSize;
5456
5457    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5458    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5459        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5460            max_width = gCamCapability[camera_id]->raw_dim[i].width;
5461            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5462        }
5463    }
5464    return maxRawSize;
5465}
5466
5467
5468/*===========================================================================
5469 * FUNCTION   : calcMaxJpegDim
5470 *
5471 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5472 *
5473 * PARAMETERS :
5474 *
5475 * RETURN     : max_jpeg_dim
5476 *==========================================================================*/
5477cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5478{
5479    cam_dimension_t max_jpeg_dim;
5480    cam_dimension_t curr_jpeg_dim;
5481    max_jpeg_dim.width = 0;
5482    max_jpeg_dim.height = 0;
5483    curr_jpeg_dim.width = 0;
5484    curr_jpeg_dim.height = 0;
5485    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5486        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5487        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5488        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5489            max_jpeg_dim.width * max_jpeg_dim.height ) {
5490            max_jpeg_dim.width = curr_jpeg_dim.width;
5491            max_jpeg_dim.height = curr_jpeg_dim.height;
5492        }
5493    }
5494    return max_jpeg_dim;
5495}
5496
5497/*===========================================================================
5498 * FUNCTION   : addStreamConfig
5499 *
5500 * DESCRIPTION: adds the stream configuration to the array
5501 *
5502 * PARAMETERS :
5503 * @available_stream_configs : pointer to stream configuration array
5504 * @scalar_format            : scalar format
5505 * @dim                      : configuration dimension
5506 * @config_type              : input or output configuration type
5507 *
5508 * RETURN     : NONE
5509 *==========================================================================*/
5510void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5511        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5512{
5513    available_stream_configs.add(scalar_format);
5514    available_stream_configs.add(dim.width);
5515    available_stream_configs.add(dim.height);
5516    available_stream_configs.add(config_type);
5517}
5518
5519
5520/*===========================================================================
5521 * FUNCTION   : initStaticMetadata
5522 *
5523 * DESCRIPTION: initialize the static metadata
5524 *
5525 * PARAMETERS :
5526 *   @cameraId  : camera Id
5527 *
5528 * RETURN     : int32_t type of status
5529 *              0  -- success
5530 *              non-zero failure code
5531 *==========================================================================*/
5532int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5533{
5534    int rc = 0;
5535    CameraMetadata staticInfo;
5536    size_t count = 0;
5537    bool limitedDevice = false;
5538    char prop[PROPERTY_VALUE_MAX];
5539
5540    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5541     * guaranteed, its advertised as limited device */
5542    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5543            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5544
5545    uint8_t supportedHwLvl = limitedDevice ?
5546            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5547            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5548
5549    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5550            &supportedHwLvl, 1);
5551
5552    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5553    /*HAL 3 only*/
5554    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5555                    &gCamCapability[cameraId]->min_focus_distance, 1);
5556
5557    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5558                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5559
5560    /*should be using focal lengths but sensor doesn't provide that info now*/
5561    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5562                      &gCamCapability[cameraId]->focal_length,
5563                      1);
5564
5565    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5566                      gCamCapability[cameraId]->apertures,
5567                      gCamCapability[cameraId]->apertures_count);
5568
5569    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5570                gCamCapability[cameraId]->filter_densities,
5571                gCamCapability[cameraId]->filter_densities_count);
5572
5573
5574    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5575                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5576                      gCamCapability[cameraId]->optical_stab_modes_count);
5577
5578    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5579            gCamCapability[cameraId]->lens_shading_map_size.height};
5580    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5581                      lens_shading_map_size,
5582                      sizeof(lens_shading_map_size)/sizeof(int32_t));
5583
5584    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5585            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5586
5587    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5588            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5589
5590    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5591            &gCamCapability[cameraId]->max_frame_duration, 1);
5592
5593    camera_metadata_rational baseGainFactor = {
5594            gCamCapability[cameraId]->base_gain_factor.numerator,
5595            gCamCapability[cameraId]->base_gain_factor.denominator};
5596    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5597                      &baseGainFactor, 1);
5598
5599    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5600                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5601
5602    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5603            gCamCapability[cameraId]->pixel_array_size.height};
5604    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5605                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5606
5607    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5608                                                gCamCapability[cameraId]->active_array_size.top,
5609                                                gCamCapability[cameraId]->active_array_size.width,
5610                                                gCamCapability[cameraId]->active_array_size.height};
5611    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5612                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5613
    /* Interior of the static-metadata population routine: each
     * staticInfo.update() below publishes one Android static metadata tag,
     * sourced from the per-camera capability table gCamCapability[cameraId]. */
5614    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5615            &gCamCapability[cameraId]->white_level, 1);
5616
5617    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5618            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5619
5620    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5621                      &gCamCapability[cameraId]->flash_charge_duration, 1);
5622
5623    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5624                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5625
    /* Timestamp source is reported as UNKNOWN -- presumably the sensor clock
     * is not guaranteed to be in the CLOCK_BOOTTIME timebase; confirm. */
5626    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
5627    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5628            &timestampSource, 1);
5629
5630    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5631                      &gCamCapability[cameraId]->histogram_size, 1);
5632
5633    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5634            &gCamCapability[cameraId]->max_histogram_count, 1);
5635
5636    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5637            gCamCapability[cameraId]->sharpness_map_size.height};
5638
5639    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5640            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5641
5642    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5643            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5644
    /* Pixel formats advertised to the framework. Note this list mixes
     * ANDROID_SCALER_AVAILABLE_FORMATS_* and HAL_PIXEL_FORMAT_* enumerators;
     * presumably they share numeric values in the HAL headers -- confirm,
     * since the switch statements below match on both families. */
5645    int32_t scalar_formats[] = {
5646            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5647            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5648            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5649            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5650            HAL_PIXEL_FORMAT_RAW10,
5651            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5652    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5653    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5654                      scalar_formats,
5655                      scalar_formats_count);
5656
    /* Each size table is flattened into (width, height) int32 pairs, so the
     * published element count is count * 2. 'count' is clamped to
     * MAX_SIZES_CNT to match the fixed-size scratch arrays. */
5657    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
5658    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5659    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
5660            count, MAX_SIZES_CNT, available_processed_sizes);
5661    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
5662            available_processed_sizes, count * 2);
5663
5664    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
5665    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
5666    makeTable(gCamCapability[cameraId]->raw_dim,
5667            count, MAX_SIZES_CNT, available_raw_sizes);
5668    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
5669            available_raw_sizes, count * 2);
5670
    /* FPS ranges are likewise flattened into (min, max) int32 pairs. */
5671    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
5672    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
5673    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
5674            count, MAX_SIZES_CNT, available_fps_ranges);
5675    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5676            available_fps_ranges, count * 2);
5677
    /* Exposure compensation step published as a rational (num/denom). */
5678    camera_metadata_rational exposureCompensationStep = {
5679            gCamCapability[cameraId]->exp_compensation_step.numerator,
5680            gCamCapability[cameraId]->exp_compensation_step.denominator};
5681    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
5682                      &exposureCompensationStep, 1);
5683
    /* Only OFF is advertised -- EIS is not exposed through this HAL3 path. */
5684    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
5685    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
5686                      availableVstabModes, sizeof(availableVstabModes));
5687
    /* Hard-coded 4x max digital zoom, shared between the HAL1 and HAL3
     * implementations per the original comment. */
5688    /*HAL 1 and HAL 3 common*/
5689    float maxZoom = 4;
5690    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5691            &maxZoom, 1);
5692
5693    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
5694    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
5695
    /* Metering regions: 1 AE, 0 AWB, 1 AF. The AF region count is zeroed
     * when only a single focus mode is supported (fixed-focus module). */
5696    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
5697    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
5698        max3aRegions[2] = 0; /* AF not supported */
5699    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
5700            max3aRegions, 3);
5701
    /* Face-detect support is selected at boot via the persist.camera.facedetect
     * property (default "1"); the legend below maps its value to the set of
     * advertised modes. Any value outside 1..3 advertises OFF only and forces
     * the reported max face count to 0. */
5702    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
5703    memset(prop, 0, sizeof(prop));
5704    property_get("persist.camera.facedetect", prop, "1");
5705    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
5706    CDBG("%s: Support face detection mode: %d",
5707            __func__, supportedFaceDetectMode);
5708
5709    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
5710    Vector<uint8_t> availableFaceDetectModes;
5711    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
5712    if (supportedFaceDetectMode == 1) {
5713        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5714    } else if (supportedFaceDetectMode == 2) {
5715        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
5716    } else if (supportedFaceDetectMode == 3) {
5717        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5718        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
5719    } else {
5720        maxFaces = 0;
5721    }
5722    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5723            availableFaceDetectModes.array(),
5724            availableFaceDetectModes.size());
    /* maxFaces is already int32_t; the cast below is redundant but harmless. */
5725    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
5726            (int32_t *)&maxFaces, 1);
5727
5728    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
5729                                           gCamCapability[cameraId]->exposure_compensation_max};
5730    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
5731            exposureCompensationRange,
5732            sizeof(exposureCompensationRange)/sizeof(int32_t));
5733
5734    uint8_t lensFacing = (facingBack) ?
5735            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
5736    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
5737
5738    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
5739                      available_thumbnail_sizes,
5740                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5741
    /* Derive the JPEG size list from the processed-size list.
     * filterJpegSizes() presumably drops entries that exceed the active array
     * or the max downscale factor -- confirm against its definition. The
     * returned jpeg_sizes_cnt is in int32 entries, i.e. two per (w,h) pair,
     * matching the jpeg_sizes_cnt/2 iteration below. */
5742    /*all sizes will be clubbed into this tag*/
5743    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
5744    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5745    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
5746            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
5747            gCamCapability[cameraId]->max_downscale_factor);
5748    /*android.scaler.availableStreamConfigurations*/
    /* Sized from the clamped picture-size count ('count' still holds it from
     * the JPEG-size filtering just above) x formats x 4 ints per entry.
     * Only used later to size the min-frame-duration scratch array. */
5749    size_t max_stream_configs_size = count * scalar_formats_count * 4;
5750    Vector<int32_t> available_stream_configs;
5751    cam_dimension_t active_array_dim;
5752    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
5753    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Build the (format, width, height, direction) tuples per format:
     *  - RAW formats: one OUTPUT config per supported RAW dimension.
     *  - BLOB (JPEG): one OUTPUT config per filtered JPEG size.
     *  - YUV_420_888 / IMPLEMENTATION_DEFINED (and any other format): one
     *    OUTPUT config per picture size, plus one INPUT config at the largest
     *    picture size to support reprocessing. */
5754    /* Add input/output stream configurations for each scalar formats*/
5755    for (size_t j = 0; j < scalar_formats_count; j++) {
5756        switch (scalar_formats[j]) {
5757        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5758        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5759        case HAL_PIXEL_FORMAT_RAW10:
5760            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5761                addStreamConfig(available_stream_configs, scalar_formats[j],
5762                        gCamCapability[cameraId]->raw_dim[i],
5763                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5764            }
5765            break;
5766        case HAL_PIXEL_FORMAT_BLOB:
5767            cam_dimension_t jpeg_size;
5768            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
5769                jpeg_size.width  = available_jpeg_sizes[i*2];
5770                jpeg_size.height = available_jpeg_sizes[i*2+1];
5771                addStreamConfig(available_stream_configs, scalar_formats[j],
5772                        jpeg_size,
5773                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5774            }
5775            break;
5776        case HAL_PIXEL_FORMAT_YCbCr_420_888:
5777        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
5778        default:
5779            cam_dimension_t largest_picture_size;
5780            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
5781            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5782                addStreamConfig(available_stream_configs, scalar_formats[j],
5783                        gCamCapability[cameraId]->picture_sizes_tbl[i],
5784                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
    /* Track the largest advertised size; it becomes the INPUT (reprocess)
     * resolution below. Requires >= on both axes simultaneously. */
5785                /* Book keep largest */
5786                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
5787                        >= largest_picture_size.width &&
5788                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
5789                        >= largest_picture_size.height)
5790                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
5791            }
5792            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
5793            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
5794                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5795                 addStreamConfig(available_stream_configs, scalar_formats[j],
5796                         largest_picture_size,
5797                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
5798            }
5799            break;
5800        }
5801    }
5802
5803    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
5804                      available_stream_configs.array(), available_stream_configs.size());
    /* Fixed defaults for hot-pixel handling: correction FAST, map reporting OFF. */
5805    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5806    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5807
5808    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5809    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5810
5811    /* android.scaler.availableMinFrameDurations */
    /* NOTE(review): runtime-sized stack array (VLA) -- a compiler extension
     * in C++, not standard. Its bound (max_stream_configs_size, computed from
     * the clamped picture-size count) assumes 4 int64 entries per
     * (format, size). If supported_raw_dim_cnt ever exceeds the clamped
     * picture-size count, the RAW branch below could write past the end --
     * verify against the capability table limits. */
5812    int64_t available_min_durations[max_stream_configs_size];
5813    size_t idx = 0;
    /* Entries are (format, width, height, min_duration_ns) quadruples:
     * RAW formats use raw_dim/raw_min_duration, everything else uses
     * picture_sizes_tbl/picture_min_duration. */
5814    for (size_t j = 0; j < scalar_formats_count; j++) {
5815        switch (scalar_formats[j]) {
5816        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5817        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5818        case HAL_PIXEL_FORMAT_RAW10:
5819            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5820                available_min_durations[idx] = scalar_formats[j];
5821                available_min_durations[idx+1] =
5822                    gCamCapability[cameraId]->raw_dim[i].width;
5823                available_min_durations[idx+2] =
5824                    gCamCapability[cameraId]->raw_dim[i].height;
5825                available_min_durations[idx+3] =
5826                    gCamCapability[cameraId]->raw_min_duration[i];
5827                idx+=4;
5828            }
5829            break;
5830        default:
5831            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5832                available_min_durations[idx] = scalar_formats[j];
5833                available_min_durations[idx+1] =
5834                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5835                available_min_durations[idx+2] =
5836                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5837                available_min_durations[idx+3] =
5838                    gCamCapability[cameraId]->picture_min_duration[i];
5839                idx+=4;
5840            }
5841            break;
5842        }
5843    }
    /* 'idx' is the number of int64 entries actually filled. */
5844    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
5845                      &available_min_durations[0], idx);
5846
    /* High-speed (HFR) video configurations: translate each HFR table entry's
     * mode enum into a numeric frame rate, then publish two 5-tuples per
     * qualifying mode (see the in-loop comment for the rationale). */
5847    Vector<int32_t> available_hfr_configs;
5848    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
5849        int32_t fps = 0;
5850        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
5851        case CAM_HFR_MODE_60FPS:
5852            fps = 60;
5853            break;
5854        case CAM_HFR_MODE_90FPS:
5855            fps = 90;
5856            break;
5857        case CAM_HFR_MODE_120FPS:
5858            fps = 120;
5859            break;
5860        case CAM_HFR_MODE_150FPS:
5861            fps = 150;
5862            break;
5863        case CAM_HFR_MODE_180FPS:
5864            fps = 180;
5865            break;
5866        case CAM_HFR_MODE_210FPS:
5867            fps = 210;
5868            break;
5869        case CAM_HFR_MODE_240FPS:
5870            fps = 240;
5871            break;
5872        case CAM_HFR_MODE_480FPS:
5873            fps = 480;
5874            break;
    /* Unrecognized/OFF modes leave fps == 0 and are filtered out below. */
5875        case CAM_HFR_MODE_OFF:
5876        case CAM_HFR_MODE_MAX:
5877        default:
5878            break;
5879        }
5880
5881        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
5882        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
5883            /* For each HFR frame rate, need to advertise one variable fps range
5884             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
5885             * [120, 120]. While camcorder preview alone is running [30, 120] is
5886             * set by the app. When video recording is started, [120, 120] is
5887             * set. This way sensor configuration does not change when recording
5888             * is started */
5889
5890            /* (width, height, fps_min, fps_max, batch_size_max) */
5891            available_hfr_configs.add(
5892                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5893            available_hfr_configs.add(
5894                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5895            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
5896            available_hfr_configs.add(fps);
    /* Max batch size = capture fps / preview fps (e.g. 120/30 = 4). */
5897            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
5898
5899            /* (width, height, fps_min, fps_max, batch_size_max) */
5900            available_hfr_configs.add(
5901                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
5902            available_hfr_configs.add(
5903                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
5904            available_hfr_configs.add(fps);
5905            available_hfr_configs.add(fps);
5906            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
5907       }
5908    }
    /* HFR is gated on persist.camera.hal3hfr.enable (default on); the tag is
     * only published when the property is set AND at least one config exists. */
5909    //Advertise HFR capability only if the property is set
5910    memset(prop, 0, sizeof(prop));
5911    property_get("persist.camera.hal3hfr.enable", prop, "1");
5912    uint8_t hfrEnable = (uint8_t)atoi(prop);
5913
5914    if(hfrEnable && available_hfr_configs.array()) {
5915        staticInfo.update(
5916                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
5917                available_hfr_configs.array(), available_hfr_configs.size());
5918    }
5919
5920    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
5921    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
5922                      &max_jpeg_size, 1);
5923
    /* Effect modes: translate each HAL enum to its framework value via
     * lookupFwkName(); entries with no framework mapping are skipped. */
5924    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
5925    size_t size = 0;
5926    count = CAM_EFFECT_MODE_MAX;
5927    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
5928    for (size_t i = 0; i < count; i++) {
5929        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5930                gCamCapability[cameraId]->supported_effects[i]);
5931        if (NAME_NOT_FOUND != val) {
5932            avail_effects[size] = (uint8_t)val;
5933            size++;
5934        }
5935    }
5936    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
5937                      avail_effects,
5938                      size);
5939
    /* Scene modes: CAM_SCENE_MODE_OFF is excluded; supported_indexes records
     * the original HAL table index of each advertised mode, which
     * makeOverridesList() uses to pick the matching override triplet. */
5940    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
5941    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
5942    size_t supported_scene_modes_cnt = 0;
5943    count = CAM_SCENE_MODE_MAX;
5944    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
5945    for (size_t i = 0; i < count; i++) {
5946        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
5947                CAM_SCENE_MODE_OFF) {
5948            int val = lookupFwkName(SCENE_MODES_MAP,
5949                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
5950                    gCamCapability[cameraId]->supported_scene_modes[i]);
5951            if (NAME_NOT_FOUND != val) {
5952                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
5953                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
5954                supported_scene_modes_cnt++;
5955            }
5956        }
5957    }
5958    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
5959                      avail_scene_modes,
5960                      supported_scene_modes_cnt);
5961
    /* Scene-mode overrides: 3 entries (presumably AE/AWB/AF -- confirm in
     * makeOverridesList) per advertised scene mode. */
5962    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
5963    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
5964                      supported_scene_modes_cnt,
5965                      CAM_SCENE_MODE_MAX,
5966                      scene_mode_overrides,
5967                      supported_indexes,
5968                      cameraId);
5969
    /* NOTE(review): when no scene modes are supported, the count is forced to
     * 1 AFTER makeOverridesList() ran with cnt == 0, so the three override
     * bytes published below come from an uninitialized scene_mode_overrides
     * prefix -- verify whether makeOverridesList zero-fills its output. */
5970    if (supported_scene_modes_cnt == 0) {
5971        supported_scene_modes_cnt = 1;
5972        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
5973    }
5974
5975    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
5976            scene_mode_overrides, supported_scene_modes_cnt * 3);
5977
    /* Top-level 3A control modes; count hard-coded to match the array. */
5978    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
5979                                         ANDROID_CONTROL_MODE_AUTO,
5980                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
5981    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
5982            available_control_modes,
5983            3);
5984
    /* Antibanding modes: same HAL-to-framework translation pattern. */
5985    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
5986    size = 0;
5987    count = CAM_ANTIBANDING_MODE_MAX;
5988    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
5989    for (size_t i = 0; i < count; i++) {
5990        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5991                gCamCapability[cameraId]->supported_antibandings[i]);
5992        if (NAME_NOT_FOUND != val) {
5993            avail_antibanding_modes[size] = (uint8_t)val;
5994            size++;
5995        }
5996
5997    }
5998    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
5999                      avail_antibanding_modes,
6000                      size);
6001
    /* Chromatic aberration correction (CAC) modes. Falls back to OFF when the
     * capability table lists none; an unmappable entry logs an error and
     * truncates the list at that point (break, not continue). */
6002    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
6003    size = 0;
6004    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6005    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6006    if (0 == count) {
6007        avail_abberation_modes[0] =
6008                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6009        size++;
6010    } else {
6011        for (size_t i = 0; i < count; i++) {
6012            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6013                    gCamCapability[cameraId]->aberration_modes[i]);
6014            if (NAME_NOT_FOUND != val) {
6015                avail_abberation_modes[size] = (uint8_t)val;
6016                size++;
6017            } else {
6018                ALOGE("%s: Invalid CAC mode %d", __func__,
6019                        gCamCapability[cameraId]->aberration_modes[i]);
6020                break;
6021            }
6022        }
6023
6024    }
6025    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6026            avail_abberation_modes,
6027            size);
6028
    /* AF modes: HAL-to-framework translation, unmapped entries skipped. */
6029    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6030    size = 0;
6031    count = CAM_FOCUS_MODE_MAX;
6032    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6033    for (size_t i = 0; i < count; i++) {
6034        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6035                gCamCapability[cameraId]->supported_focus_modes[i]);
6036        if (NAME_NOT_FOUND != val) {
6037            avail_af_modes[size] = (uint8_t)val;
6038            size++;
6039        }
6040    }
6041    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6042                      avail_af_modes,
6043                      size);
6044
    /* AWB modes: same translation pattern as AF above. */
6045    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6046    size = 0;
6047    count = CAM_WB_MODE_MAX;
6048    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6049    for (size_t i = 0; i < count; i++) {
6050        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6051                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6052                gCamCapability[cameraId]->supported_white_balances[i]);
6053        if (NAME_NOT_FOUND != val) {
6054            avail_awb_modes[size] = (uint8_t)val;
6055            size++;
6056        }
6057    }
6058    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6059                      avail_awb_modes,
6060                      size);
6061
    /* Flash firing levels are copied verbatim (no framework translation). */
6062    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6063    count = CAM_FLASH_FIRING_LEVEL_MAX;
6064    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6065            count);
6066    for (size_t i = 0; i < count; i++) {
6067        available_flash_levels[i] =
6068                gCamCapability[cameraId]->supported_firing_levels[i];
6069    }
6070    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6071            available_flash_levels, count);
6072
6073    uint8_t flashAvailable;
6074    if (gCamCapability[cameraId]->flash_available)
6075        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6076    else
6077        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6078    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6079            &flashAvailable, 1);
6080
    /* AE modes come straight from the capability table; the flash-assisted AE
     * modes are appended only when the unit actually has a flash. */
6081    Vector<uint8_t> avail_ae_modes;
6082    count = CAM_AE_MODE_MAX;
6083    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6084    for (size_t i = 0; i < count; i++) {
6085        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6086    }
6087    if (flashAvailable) {
6088        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6089        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6090    }
6091    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6092                      avail_ae_modes.array(),
6093                      avail_ae_modes.size());
6094
6095    int32_t sensitivity_range[2];
6096    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6097    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6098    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6099                      sensitivity_range,
6100                      sizeof(sensitivity_range) / sizeof(int32_t));
6101
6102    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6103                      &gCamCapability[cameraId]->max_analog_sensitivity,
6104                      1);
6105
6106    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6107    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6108                      &sensor_orientation,
6109                      1);
6110
    /* Max simultaneous output streams by class: (stalling, processed, raw). */
6111    int32_t max_output_streams[] = {
6112            MAX_STALLING_STREAMS,
6113            MAX_PROCESSED_STREAMS,
6114            MAX_RAW_STREAMS};
6115    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6116            max_output_streams,
6117            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6118
    /* Published with count 0: the tag exists but lists no controllable LEDs. */
6119    uint8_t avail_leds = 0;
6120    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6121                      &avail_leds, 0);
6122
    /* Focus-distance calibration tag is only published when the HAL value has
     * a framework mapping. */
6123    uint8_t focus_dist_calibrated;
6124    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6125            gCamCapability[cameraId]->focus_dist_calibrated);
6126    if (NAME_NOT_FOUND != val) {
6127        focus_dist_calibrated = (uint8_t)val;
6128        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6129                     &focus_dist_calibrated, 1);
6130    }
6131
6132    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6133    size = 0;
6134    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6135            MAX_TEST_PATTERN_CNT);
6136    for (size_t i = 0; i < count; i++) {
6137        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6138                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6139        if (NAME_NOT_FOUND != testpatternMode) {
6140            avail_testpattern_modes[size] = testpatternMode;
6141            size++;
6142        }
6143    }
6144    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6145                      avail_testpattern_modes,
6146                      size);
6147
    /* Pipeline depth = in-flight requests plus the empty-pipeline and
     * frame-skip delays defined elsewhere in this HAL. */
6148    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6149    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6150                      &max_pipeline_depth,
6151                      1);
6152
6153    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6154    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6155                      &partial_result_count,
6156                       1);
6157
6158    int32_t max_stall_duration = MAX_REPROCESS_STALL;
6159    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6160
    /* Device capability list. CONSTRAINED_HIGH_SPEED_VIDEO is gated on the
     * same condition as the high-speed-video-configurations tag earlier
     * (property enabled and a non-empty HFR table), and RAW is advertised
     * for every sensor type except YUV. */
6161    Vector<uint8_t> available_capabilities;
6162    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6163    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6164    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6165    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6166    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6167    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6168    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6169    if (hfrEnable && available_hfr_configs.array()) {
6170        available_capabilities.add(
6171                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6172    }
6173
6174    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6175        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6176    }
6177    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6178            available_capabilities.array(),
6179            available_capabilities.size());
6180
    /* AE/AWB lock availability is keyed off the sensor type (RAW => TRUE),
     * used here as a proxy for the MANUAL_* / BURST_CAPTURE capabilities
     * described in the original comments. */
6181    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
6182    //BURST_CAPTURE.
6183    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6184            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6185
6186    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6187            &aeLockAvailable, 1);
6188
6189    //awbLockAvailable to be set to true if capabilities has
6190    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
6191    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6192            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6193
6194    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6195            &awbLockAvailable, 1);
6196
    /* Single reprocess input stream supported. */
6197    int32_t max_input_streams = 1;
6198    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6199                      &max_input_streams,
6200                      1);
6201
    /* Reprocess input->output map: both IMPLEMENTATION_DEFINED and
     * YCbCr_420_888 inputs can produce BLOB or YCbCr_420_888 outputs,
     * matching the INPUT stream configs advertised earlier. */
6202    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
6203    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6204            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6205            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6206            HAL_PIXEL_FORMAT_YCbCr_420_888};
6207    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6208                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6209
    /* LIMITED devices report the HAL sync latency; FULL devices report
     * per-frame control. */
6210    int32_t max_latency = (limitedDevice) ?
6211            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6212    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6213                      &max_latency,
6214                      1);
6215
    /* Fixed mode lists below; counts are hard-coded or derived via sizeof. */
6216    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6217                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6218    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6219            available_hot_pixel_modes,
6220            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6221
6222    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6223                                         ANDROID_SHADING_MODE_FAST,
6224                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
6225    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6226                      available_shading_modes,
6227                      3);
6228
6229    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6230                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6231    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6232                      available_lens_shading_map_modes,
6233                      2);
6234
6235    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6236                                      ANDROID_EDGE_MODE_FAST,
6237                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
6238                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6239    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6240            available_edge_modes,
6241            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6242
6243    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6244                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
6245                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6246                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6247                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6248    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6249            available_noise_red_modes,
6250            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6251
6252    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6253                                         ANDROID_TONEMAP_MODE_FAST,
6254                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6255    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6256            available_tonemap_modes,
6257            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6258
6259    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6260    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6261            available_hot_pixel_map_modes,
6262            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6263
    /* DNG reference illuminants: each tag is published only when the HAL
     * value has a framework mapping. 'val' reuses the variable declared in
     * the focus-calibration block above. */
6264    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6265            gCamCapability[cameraId]->reference_illuminant1);
6266    if (NAME_NOT_FOUND != val) {
6267        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6268        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6269    }
6270
6271    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6272            gCamCapability[cameraId]->reference_illuminant2);
6273    if (NAME_NOT_FOUND != val) {
6274        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6275        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6276    }
6277
    /* Color/calibration matrices: the capability arrays are cast through
     * void* to camera_metadata_rational_t* -- presumably the HAL rational
     * struct is layout-compatible with the metadata rational; confirm. */
6278    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6279            (void *)gCamCapability[cameraId]->forward_matrix1,
6280            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6281
6282    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6283            (void *)gCamCapability[cameraId]->forward_matrix2,
6284            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6285
6286    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6287            (void *)gCamCapability[cameraId]->color_transform1,
6288            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6289
6290    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6291            (void *)gCamCapability[cameraId]->color_transform2,
6292            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6293
6294    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6295            (void *)gCamCapability[cameraId]->calibration_transform1,
6296            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6297
6298    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
6299            (void *)gCamCapability[cameraId]->calibration_transform2,
6300            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6301
6302    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6303       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6304       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6305       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6306       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6307       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6308       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6309       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6310       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6311       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6312       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6313       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6314       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6315       ANDROID_JPEG_GPS_COORDINATES,
6316       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6317       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6318       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6319       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6320       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6321       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6322       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6323       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6324       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6325       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6326       ANDROID_STATISTICS_FACE_DETECT_MODE,
6327       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6328       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6329       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6330       ANDROID_BLACK_LEVEL_LOCK };
6331
6332    size_t request_keys_cnt =
6333            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6334    Vector<int32_t> available_request_keys;
6335    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6336    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6337        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6338    }
6339
6340    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6341            available_request_keys.array(), available_request_keys.size());
6342
6343    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6344       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6345       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6346       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6347       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6348       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6349       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6350       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6351       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6352       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6353       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6354       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6355       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6356       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6357       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6358       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6359       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6360       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6361       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6362       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6363       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6364       ANDROID_STATISTICS_FACE_SCORES};
6365    size_t result_keys_cnt =
6366            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6367
6368    Vector<int32_t> available_result_keys;
6369    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6370    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6371        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6372    }
6373    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6374       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6375       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6376    }
6377    if (supportedFaceDetectMode == 1) {
6378        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6379        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6380    } else if ((supportedFaceDetectMode == 2) ||
6381            (supportedFaceDetectMode == 3)) {
6382        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6383        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6384    }
6385    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6386            available_result_keys.array(), available_result_keys.size());
6387
6388    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6389       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6390       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6391       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6392       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6393       ANDROID_SCALER_CROPPING_TYPE,
6394       ANDROID_SYNC_MAX_LATENCY,
6395       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6396       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6397       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6398       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6399       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6400       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6401       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6402       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6403       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6404       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6405       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6406       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6407       ANDROID_LENS_FACING,
6408       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6409       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6410       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6411       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6412       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6413       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6414       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6415       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6416       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6417       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6418       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6419       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6420       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6421       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6422       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6423       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6424       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6425       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6426       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6427       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6428       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6429       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6430       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6431       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6432       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6433       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6434       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6435       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6436       ANDROID_TONEMAP_MAX_CURVE_POINTS,
6437       ANDROID_CONTROL_AVAILABLE_MODES,
6438       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6439       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6440       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6441       ANDROID_SHADING_AVAILABLE_MODES,
6442       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6443    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6444                      available_characteristics_keys,
6445                      sizeof(available_characteristics_keys)/sizeof(int32_t));
6446
6447    /*available stall durations depend on the hw + sw and will be different for different devices */
6448    /*have to add for raw after implementation*/
6449    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6450    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6451
6452    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6453    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6454            MAX_SIZES_CNT);
6455    size_t available_stall_size = count * 4;
6456    int64_t available_stall_durations[available_stall_size];
6457    idx = 0;
6458    for (uint32_t j = 0; j < stall_formats_count; j++) {
6459       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6460          for (uint32_t i = 0; i < count; i++) {
6461             available_stall_durations[idx]   = stall_formats[j];
6462             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6463             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6464             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6465             idx+=4;
6466          }
6467       } else {
6468          for (uint32_t i = 0; i < raw_count; i++) {
6469             available_stall_durations[idx]   = stall_formats[j];
6470             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6471             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6472             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6473             idx+=4;
6474          }
6475       }
6476    }
6477    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6478                      available_stall_durations,
6479                      idx);
6480    //QCAMERA3_OPAQUE_RAW
6481    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6482    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6483    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6484    case LEGACY_RAW:
6485        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6486            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6487        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6488            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6489        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6490            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6491        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6492        break;
6493    case MIPI_RAW:
6494        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6495            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6496        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6497            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6498        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6499            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6500        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6501        break;
6502    default:
6503        ALOGE("%s: unknown opaque_raw_format %d", __func__,
6504                gCamCapability[cameraId]->opaque_raw_fmt);
6505        break;
6506    }
6507    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6508
6509    int32_t strides[3*raw_count];
6510    for (size_t i = 0; i < raw_count; i++) {
6511        cam_stream_buf_plane_info_t buf_planes;
6512        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6513        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6514        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6515            &gCamCapability[cameraId]->padding_info, &buf_planes);
6516        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6517    }
6518    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6519            3*raw_count);
6520
6521    gStaticMetadata[cameraId] = staticInfo.release();
6522    return rc;
6523}
6524
6525/*===========================================================================
6526 * FUNCTION   : makeTable
6527 *
6528 * DESCRIPTION: make a table of sizes
6529 *
6530 * PARAMETERS :
6531 *
6532 *
6533 *==========================================================================*/
6534void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6535        size_t max_size, int32_t *sizeTable)
6536{
6537    size_t j = 0;
6538    if (size > max_size) {
6539       size = max_size;
6540    }
6541    for (size_t i = 0; i < size; i++) {
6542        sizeTable[j] = dimTable[i].width;
6543        sizeTable[j+1] = dimTable[i].height;
6544        j+=2;
6545    }
6546}
6547
6548/*===========================================================================
6549 * FUNCTION   : makeFPSTable
6550 *
6551 * DESCRIPTION: make a table of fps ranges
6552 *
6553 * PARAMETERS :
6554 *
6555 *==========================================================================*/
6556void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6557        size_t max_size, int32_t *fpsRangesTable)
6558{
6559    size_t j = 0;
6560    if (size > max_size) {
6561       size = max_size;
6562    }
6563    for (size_t i = 0; i < size; i++) {
6564        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6565        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6566        j+=2;
6567    }
6568}
6569
6570/*===========================================================================
6571 * FUNCTION   : makeOverridesList
6572 *
6573 * DESCRIPTION: make a list of scene mode overrides
6574 *
6575 * PARAMETERS :
6576 *
6577 *
6578 *==========================================================================*/
6579void QCamera3HardwareInterface::makeOverridesList(
6580        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6581        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6582{
6583    /*daemon will give a list of overrides for all scene modes.
6584      However we should send the fwk only the overrides for the scene modes
6585      supported by the framework*/
6586    size_t j = 0;
6587    if (size > max_size) {
6588       size = max_size;
6589    }
6590    size_t focus_count = CAM_FOCUS_MODE_MAX;
6591    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6592            focus_count);
6593    for (size_t i = 0; i < size; i++) {
6594        bool supt = false;
6595        size_t index = supported_indexes[i];
6596        overridesList[j] = gCamCapability[camera_id]->flash_available ?
6597                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6598        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6599                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6600                overridesTable[index].awb_mode);
6601        if (NAME_NOT_FOUND != val) {
6602            overridesList[j+1] = (uint8_t)val;
6603        }
6604        uint8_t focus_override = overridesTable[index].af_mode;
6605        for (size_t k = 0; k < focus_count; k++) {
6606           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6607              supt = true;
6608              break;
6609           }
6610        }
6611        if (supt) {
6612            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6613                    focus_override);
6614            if (NAME_NOT_FOUND != val) {
6615                overridesList[j+2] = (uint8_t)val;
6616            }
6617        } else {
6618           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6619        }
6620        j+=3;
6621    }
6622}
6623
6624/*===========================================================================
6625 * FUNCTION   : filterJpegSizes
6626 *
6627 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6628 *              could be downscaled to
6629 *
6630 * PARAMETERS :
6631 *
6632 * RETURN     : length of jpegSizes array
6633 *==========================================================================*/
6634
6635size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6636        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6637        uint8_t downscale_factor)
6638{
6639    if (0 == downscale_factor) {
6640        downscale_factor = 1;
6641    }
6642
6643    int32_t min_width = active_array_size.width / downscale_factor;
6644    int32_t min_height = active_array_size.height / downscale_factor;
6645    size_t jpegSizesCnt = 0;
6646    if (processedSizesCnt > maxCount) {
6647        processedSizesCnt = maxCount;
6648    }
6649    for (size_t i = 0; i < processedSizesCnt; i+=2) {
6650        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6651            jpegSizes[jpegSizesCnt] = processedSizes[i];
6652            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6653            jpegSizesCnt += 2;
6654        }
6655    }
6656    return jpegSizesCnt;
6657}
6658
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the format from the backend to one recognized by the
 *              framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
6669int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6670{
6671    int32_t halPixelFormat;
6672
6673    switch (format) {
6674    case CAM_FORMAT_YUV_420_NV12:
6675        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6676        break;
6677    case CAM_FORMAT_YUV_420_NV21:
6678        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6679        break;
6680    case CAM_FORMAT_YUV_420_NV21_ADRENO:
6681        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6682        break;
6683    case CAM_FORMAT_YUV_420_YV12:
6684        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6685        break;
6686    case CAM_FORMAT_YUV_422_NV16:
6687    case CAM_FORMAT_YUV_422_NV61:
6688    default:
6689        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6690        break;
6691    }
6692    return halPixelFormat;
6693}
6694
6695/*===========================================================================
6696 * FUNCTION   : computeNoiseModelEntryS
6697 *
6698 * DESCRIPTION: function to map a given sensitivity to the S noise
6699 *              model parameters in the DNG noise model.
6700 *
6701 * PARAMETERS : sens : the sensor sensitivity
6702 *
 * RETURN     : S (sensor amplification) noise
6704 *
6705 *==========================================================================*/
6706double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6707    double s = gCamCapability[mCameraId]->gradient_S * sens +
6708            gCamCapability[mCameraId]->offset_S;
6709    return ((s < 0.0) ? 0.0 : s);
6710}
6711
6712/*===========================================================================
6713 * FUNCTION   : computeNoiseModelEntryO
6714 *
6715 * DESCRIPTION: function to map a given sensitivity to the O noise
6716 *              model parameters in the DNG noise model.
6717 *
6718 * PARAMETERS : sens : the sensor sensitivity
6719 *
 * RETURN     : O (sensor readout) noise
6721 *
6722 *==========================================================================*/
6723double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6724    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
6725    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
6726            1.0 : (1.0 * sens / max_analog_sens);
6727    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
6728            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
6729    return ((o < 0.0) ? 0.0 : o);
6730}
6731
6732/*===========================================================================
6733 * FUNCTION   : getSensorSensitivity
6734 *
6735 * DESCRIPTION: convert iso_mode to an integer value
6736 *
6737 * PARAMETERS : iso_mode : the iso_mode supported by sensor
6738 *
 * RETURN     : sensitivity supported by sensor
6740 *
6741 *==========================================================================*/
6742int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6743{
6744    int32_t sensitivity;
6745
6746    switch (iso_mode) {
6747    case CAM_ISO_MODE_100:
6748        sensitivity = 100;
6749        break;
6750    case CAM_ISO_MODE_200:
6751        sensitivity = 200;
6752        break;
6753    case CAM_ISO_MODE_400:
6754        sensitivity = 400;
6755        break;
6756    case CAM_ISO_MODE_800:
6757        sensitivity = 800;
6758        break;
6759    case CAM_ISO_MODE_1600:
6760        sensitivity = 1600;
6761        break;
6762    default:
6763        sensitivity = -1;
6764        break;
6765    }
6766    return sensitivity;
6767}
6768
6769/*===========================================================================
6770 * FUNCTION   : getCamInfo
6771 *
6772 * DESCRIPTION: query camera capabilities
6773 *
6774 * PARAMETERS :
6775 *   @cameraId  : camera Id
6776 *   @info      : camera info struct to be filled in with camera capabilities
6777 *
6778 * RETURN     : int type of status
6779 *              NO_ERROR  -- success
6780 *              none-zero failure code
6781 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // Capability and static metadata caches are shared process-wide, so
    // both the lazy initialization and the reads below happen under the
    // global camera lock.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Every early-exit path must release gCamLock.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the sensor mount position into the framework facing enum.
    // An unknown position is reported as an error (rc = -1) but the rest of
    // the info struct is still filled in before returning.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest advertised max_fps across all supported fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput: all processed streams at full active
    // array resolution running at the highest fps, relative to the CPP's
    // maximum pixel bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
6849
6850/*===========================================================================
6851 * FUNCTION   : translateCapabilityToMetadata
6852 *
6853 * DESCRIPTION: translate the capability into camera_metadata_t
6854 *
6855 * PARAMETERS : type of the request
6856 *
6857 *
6858 * RETURN     : success: camera_metadata_t*
6859 *              failure: NULL
6860 *
6861 *==========================================================================*/
6862camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6863{
6864    if (mDefaultMetadata[type] != NULL) {
6865        return mDefaultMetadata[type];
6866    }
6867    //first time we are handling this request
6868    //fill up the metadata structure using the wrapper class
6869    CameraMetadata settings;
6870    //translate from cam_capability_t to camera_metadata_tag_t
6871    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6872    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6873    int32_t defaultRequestID = 0;
6874    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6875
6876    /* OIS disable */
6877    char ois_prop[PROPERTY_VALUE_MAX];
6878    memset(ois_prop, 0, sizeof(ois_prop));
6879    property_get("persist.camera.ois.disable", ois_prop, "0");
6880    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6881
6882    /* Force video to use OIS */
6883    char videoOisProp[PROPERTY_VALUE_MAX];
6884    memset(videoOisProp, 0, sizeof(videoOisProp));
6885    property_get("persist.camera.ois.video", videoOisProp, "1");
6886    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6887
6888    uint8_t controlIntent = 0;
6889    uint8_t focusMode;
6890    uint8_t vsMode;
6891    uint8_t optStabMode;
6892    uint8_t cacMode;
6893    uint8_t edge_mode;
6894    uint8_t noise_red_mode;
6895    uint8_t tonemap_mode;
6896    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6897    switch (type) {
6898      case CAMERA3_TEMPLATE_PREVIEW:
6899        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
6900        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6901        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6902        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6903        edge_mode = ANDROID_EDGE_MODE_FAST;
6904        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6905        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6906        break;
6907      case CAMERA3_TEMPLATE_STILL_CAPTURE:
6908        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
6909        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6910        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6911        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
6912        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
6913        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
6914        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
6915        break;
6916      case CAMERA3_TEMPLATE_VIDEO_RECORD:
6917        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
6918        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6919        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6920        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6921        edge_mode = ANDROID_EDGE_MODE_FAST;
6922        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6923        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6924        if (forceVideoOis)
6925            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6926        break;
6927      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
6928        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
6929        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6930        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6931        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6932        edge_mode = ANDROID_EDGE_MODE_FAST;
6933        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6934        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6935        if (forceVideoOis)
6936            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6937        break;
6938      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
6939        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
6940        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6941        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6942        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6943        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
6944        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
6945        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6946        break;
6947      case CAMERA3_TEMPLATE_MANUAL:
6948        edge_mode = ANDROID_EDGE_MODE_FAST;
6949        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6950        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6951        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6952        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
6953        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6954        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6955        break;
6956      default:
6957        edge_mode = ANDROID_EDGE_MODE_FAST;
6958        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6959        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6960        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6961        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
6962        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6963        break;
6964    }
6965    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
6966    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
6967    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
6968    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
6969        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6970    }
6971    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
6972
6973    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6974            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
6975        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6976    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6977            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
6978            || ois_disable)
6979        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6980    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
6981
6982    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6983            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
6984
6985    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
6986    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
6987
6988    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
6989    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
6990
6991    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
6992    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
6993
6994    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
6995    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
6996
6997    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
6998    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
6999
7000    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7001    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7002
7003    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7004    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7005
7006    /*flash*/
7007    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7008    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7009
7010    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7011    settings.update(ANDROID_FLASH_FIRING_POWER,
7012            &flashFiringLevel, 1);
7013
7014    /* lens */
7015    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7016    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7017
7018    if (gCamCapability[mCameraId]->filter_densities_count) {
7019        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7020        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7021                        gCamCapability[mCameraId]->filter_densities_count);
7022    }
7023
7024    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7025    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7026
7027    float default_focus_distance = 0;
7028    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7029
7030    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7031    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7032
7033    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7034    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7035
7036    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7037    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7038
7039    /* face detection (default to OFF) */
7040    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7041    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7042
7043    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7044    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7045
7046    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7047    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7048
7049    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7050    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7051
7052    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7053    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7054
7055    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7056    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7057
7058    /* Exposure time(Update the Min Exposure Time)*/
7059    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7060    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7061
7062    /* frame duration */
7063    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7064    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7065
7066    /* sensitivity */
7067    static const int32_t default_sensitivity = 100;
7068    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7069
7070    /*edge mode*/
7071    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7072
7073    /*noise reduction mode*/
7074    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7075
7076    /*color correction mode*/
7077    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7078    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7079
7080    /*transform matrix mode*/
7081    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7082
7083    int32_t scaler_crop_region[4];
7084    scaler_crop_region[0] = 0;
7085    scaler_crop_region[1] = 0;
7086    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7087    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7088    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7089
7090    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7091    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7092
7093    /*focus distance*/
7094    float focus_distance = 0.0;
7095    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7096
7097    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7098    float max_range = 0.0;
7099    float max_fixed_fps = 0.0;
7100    int32_t fps_range[2] = {0, 0};
7101    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7102            i++) {
7103        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7104            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7105        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7106                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7107                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7108            if (range > max_range) {
7109                fps_range[0] =
7110                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7111                fps_range[1] =
7112                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7113                max_range = range;
7114            }
7115        } else {
7116            if (range < 0.01 && max_fixed_fps <
7117                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7118                fps_range[0] =
7119                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7120                fps_range[1] =
7121                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7122                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7123            }
7124        }
7125    }
7126    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7127
7128    /*precapture trigger*/
7129    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7130    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7131
7132    /*af trigger*/
7133    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7134    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7135
7136    /* ae & af regions */
7137    int32_t active_region[] = {
7138            gCamCapability[mCameraId]->active_array_size.left,
7139            gCamCapability[mCameraId]->active_array_size.top,
7140            gCamCapability[mCameraId]->active_array_size.left +
7141                    gCamCapability[mCameraId]->active_array_size.width,
7142            gCamCapability[mCameraId]->active_array_size.top +
7143                    gCamCapability[mCameraId]->active_array_size.height,
7144            0};
7145    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7146            sizeof(active_region) / sizeof(active_region[0]));
7147    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7148            sizeof(active_region) / sizeof(active_region[0]));
7149
7150    /* black level lock */
7151    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7152    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7153
7154    /* lens shading map mode */
7155    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7156    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7157        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7158    }
7159    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7160
7161    //special defaults for manual template
7162    if (type == CAMERA3_TEMPLATE_MANUAL) {
7163        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7164        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7165
7166        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7167        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7168
7169        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7170        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7171
7172        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7173        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7174
7175        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7176        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7177
7178        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7179        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7180    }
7181
7182
7183    /* TNR
7184     * We'll use this location to determine which modes TNR will be set.
7185     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7186     * This is not to be confused with linking on a per stream basis that decision
7187     * is still on per-session basis and will be handled as part of config stream
7188     */
7189    uint8_t tnr_enable = 0;
7190
7191    if (m_bTnrPreview || m_bTnrVideo) {
7192
7193        switch (type) {
7194            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7195            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7196                    tnr_enable = 1;
7197                    break;
7198
7199            default:
7200                    tnr_enable = 0;
7201                    break;
7202        }
7203
7204        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7205        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7206        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7207
7208        CDBG("%s: TNR:%d with process plate %d for template:%d",
7209                            __func__, tnr_enable, tnr_process_type, type);
7210    }
7211
7212    /* CDS default */
7213    char prop[PROPERTY_VALUE_MAX];
7214    memset(prop, 0, sizeof(prop));
7215    property_get("persist.camera.CDS", prop, "Auto");
7216    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7217    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7218    if (CAM_CDS_MODE_MAX == cds_mode) {
7219        cds_mode = CAM_CDS_MODE_AUTO;
7220    }
7221    m_CdsPreference = cds_mode;
7222
7223    /* Disabling CDS in templates which have TNR enabled*/
7224    if (tnr_enable)
7225        cds_mode = CAM_CDS_MODE_OFF;
7226
7227    int32_t mode = cds_mode;
7228    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7229    mDefaultMetadata[type] = settings.release();
7230
7231    return mDefaultMetadata[type];
7232}
7233
7234/*===========================================================================
7235 * FUNCTION   : setFrameParameters
7236 *
7237 * DESCRIPTION: set parameters per frame as requested in the metadata from
7238 *              framework
7239 *
7240 * PARAMETERS :
7241 *   @request   : request that needs to be serviced
7242 *   @streamID : Stream ID of all the requested streams
7243 *   @blob_request: Whether this request is a blob request or not
7244 *
7245 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error returned by metadata translation
7247 *==========================================================================*/
7248int QCamera3HardwareInterface::setFrameParameters(
7249                    camera3_capture_request_t *request,
7250                    cam_stream_ID_t streamID,
7251                    int blob_request,
7252                    uint32_t snapshotStreamId)
7253{
7254    /*translate from camera_metadata_t type to parm_type_t*/
7255    int rc = 0;
7256    int32_t hal_version = CAM_HAL_V3;
7257
7258    clear_metadata_buffer(mParameters);
7259    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7260        ALOGE("%s: Failed to set hal version in the parameters", __func__);
7261        return BAD_VALUE;
7262    }
7263
7264    /*we need to update the frame number in the parameters*/
7265    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7266            request->frame_number)) {
7267        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7268        return BAD_VALUE;
7269    }
7270
7271    /* Update stream id of all the requested buffers */
7272    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7273        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7274        return BAD_VALUE;
7275    }
7276
7277    if (mUpdateDebugLevel) {
7278        uint32_t dummyDebugLevel = 0;
7279        /* The value of dummyDebugLevel is irrelavent. On
7280         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7281        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7282                dummyDebugLevel)) {
7283            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7284            return BAD_VALUE;
7285        }
7286        mUpdateDebugLevel = false;
7287    }
7288
7289    if(request->settings != NULL){
7290        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7291        if (blob_request)
7292            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7293    }
7294
7295    return rc;
7296}
7297
7298/*===========================================================================
7299 * FUNCTION   : setReprocParameters
7300 *
7301 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7302 *              return it.
7303 *
7304 * PARAMETERS :
7305 *   @request   : request that needs to be serviced
7306 *
7307 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error returned by metadata translation
7309 *==========================================================================*/
7310int32_t QCamera3HardwareInterface::setReprocParameters(
7311        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7312        uint32_t snapshotStreamId)
7313{
7314    /*translate from camera_metadata_t type to parm_type_t*/
7315    int rc = 0;
7316
7317    if (NULL == request->settings){
7318        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7319        return BAD_VALUE;
7320    }
7321
7322    if (NULL == reprocParam) {
7323        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7324        return BAD_VALUE;
7325    }
7326    clear_metadata_buffer(reprocParam);
7327
7328    /*we need to update the frame number in the parameters*/
7329    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7330            request->frame_number)) {
7331        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7332        return BAD_VALUE;
7333    }
7334
7335    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7336    if (rc < 0) {
7337        ALOGE("%s: Failed to translate reproc request", __func__);
7338        return rc;
7339    }
7340
7341    CameraMetadata frame_settings;
7342    frame_settings = request->settings;
7343    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7344            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7345        int32_t *crop_count =
7346                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7347        int32_t *crop_data =
7348                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7349        int32_t *roi_map =
7350                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7351        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7352            cam_crop_data_t crop_meta;
7353            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7354            crop_meta.num_of_streams = 1;
7355            crop_meta.crop_info[0].crop.left   = crop_data[0];
7356            crop_meta.crop_info[0].crop.top    = crop_data[1];
7357            crop_meta.crop_info[0].crop.width  = crop_data[2];
7358            crop_meta.crop_info[0].crop.height = crop_data[3];
7359
7360            crop_meta.crop_info[0].roi_map.left =
7361                    roi_map[0];
7362            crop_meta.crop_info[0].roi_map.top =
7363                    roi_map[1];
7364            crop_meta.crop_info[0].roi_map.width =
7365                    roi_map[2];
7366            crop_meta.crop_info[0].roi_map.height =
7367                    roi_map[3];
7368
7369            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7370                rc = BAD_VALUE;
7371            }
7372            CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7373                    __func__,
7374                    request->input_buffer->stream,
7375                    crop_meta.crop_info[0].crop.left,
7376                    crop_meta.crop_info[0].crop.top,
7377                    crop_meta.crop_info[0].crop.width,
7378                    crop_meta.crop_info[0].crop.height);
7379            CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7380                    __func__,
7381                    request->input_buffer->stream,
7382                    crop_meta.crop_info[0].roi_map.left,
7383                    crop_meta.crop_info[0].roi_map.top,
7384                    crop_meta.crop_info[0].roi_map.width,
7385                    crop_meta.crop_info[0].roi_map.height);
7386            } else {
7387                ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7388            }
7389    } else {
7390        ALOGE("%s: No crop data from matching output stream", __func__);
7391    }
7392
7393    /* These settings are not needed for regular requests so handle them specially for
7394       reprocess requests; information needed for EXIF tags */
7395    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7396        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7397                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7398        if (NAME_NOT_FOUND != val) {
7399            uint32_t flashMode = (uint32_t)val;
7400            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7401                rc = BAD_VALUE;
7402            }
7403        } else {
7404            ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7405                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7406        }
7407    } else {
7408        CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7409    }
7410
7411    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7412        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7413        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7414            rc = BAD_VALUE;
7415        }
7416    } else {
7417        CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7418    }
7419
7420    return rc;
7421}
7422
7423/*===========================================================================
7424 * FUNCTION   : saveRequestSettings
7425 *
7426 * DESCRIPTION: Add any settings that might have changed to the request settings
7427 *              and save the settings to be applied on the frame
7428 *
7429 * PARAMETERS :
7430 *   @jpegMetadata : the extracted and/or modified jpeg metadata
7431 *   @request      : request with initial settings
7432 *
7433 * RETURN     :
7434 * camera_metadata_t* : pointer to the saved request settings
7435 *==========================================================================*/
7436camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7437        const CameraMetadata &jpegMetadata,
7438        camera3_capture_request_t *request)
7439{
7440    camera_metadata_t *resultMetadata;
7441    CameraMetadata camMetadata;
7442    camMetadata = request->settings;
7443
7444    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7445        int32_t thumbnail_size[2];
7446        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7447        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7448        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7449                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7450    }
7451
7452    resultMetadata = camMetadata.release();
7453    return resultMetadata;
7454}
7455
7456/*===========================================================================
7457 * FUNCTION   : setHalFpsRange
7458 *
7459 * DESCRIPTION: set FPS range parameter
7460 *
7461 *
7462 * PARAMETERS :
7463 *   @settings    : Metadata from framework
7464 *   @hal_metadata: Metadata buffer
7465 *
7466 *
7467 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
7469 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): no exists() check on ANDROID_CONTROL_AE_TARGET_FPS_RANGE --
    // assumes the caller only invokes this when the tag is present; confirm at
    // the call site, otherwise data.i32 may be a NULL pointer.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the video fps range simply mirrors the requested AE range;
    // the HFR branch below overrides this for constrained high-speed mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset batch size every call; it is only non-zero in batch-mode HFR.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Pin the whole range to the requested max fps (fixed-rate sensor
        // operation, per the table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // NOTE(review): max_fps is a float passed where lookupHalName takes an
        // int -- relies on HFR rates being exact integral values; confirm.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to keep preview at
                // PREVIEW_FPS_FOR_HFR, capped at the backend maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range to the backend.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7563
7564/*===========================================================================
7565 * FUNCTION   : translateToHalMetadata
7566 *
7567 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7568 *
7569 *
7570 * PARAMETERS :
7571 *   @request  : request sent from framework
7572 *
7573 *
7574 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
7576 *==========================================================================*/
7577int QCamera3HardwareInterface::translateToHalMetadata
7578                                  (const camera3_capture_request_t *request,
7579                                   metadata_buffer_t *hal_metadata,
7580                                   uint32_t snapshotStreamId)
7581{
7582    int rc = 0;
7583    CameraMetadata frame_settings;
7584    frame_settings = request->settings;
7585
7586    /* Do not change the order of the following list unless you know what you are
7587     * doing.
7588     * The order is laid out in such a way that parameters in the front of the table
7589     * may be used to override the parameters later in the table. Examples are:
7590     * 1. META_MODE should precede AEC/AWB/AF MODE
7591     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7592     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
7593     * 4. Any mode should precede it's corresponding settings
7594     */
7595    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7596        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7597        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7598            rc = BAD_VALUE;
7599        }
7600        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7601        if (rc != NO_ERROR) {
7602            ALOGE("%s: extractSceneMode failed", __func__);
7603        }
7604    }
7605
7606    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7607        uint8_t fwk_aeMode =
7608            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7609        uint8_t aeMode;
7610        int32_t redeye;
7611
7612        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
7613            aeMode = CAM_AE_MODE_OFF;
7614        } else {
7615            aeMode = CAM_AE_MODE_ON;
7616        }
7617        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
7618            redeye = 1;
7619        } else {
7620            redeye = 0;
7621        }
7622
7623        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7624                fwk_aeMode);
7625        if (NAME_NOT_FOUND != val) {
7626            int32_t flashMode = (int32_t)val;
7627            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
7628        }
7629
7630        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
7631        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
7632            rc = BAD_VALUE;
7633        }
7634    }
7635
7636    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
7637        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
7638        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7639                fwk_whiteLevel);
7640        if (NAME_NOT_FOUND != val) {
7641            uint8_t whiteLevel = (uint8_t)val;
7642            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
7643                rc = BAD_VALUE;
7644            }
7645        }
7646    }
7647
7648    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
7649        uint8_t fwk_cacMode =
7650                frame_settings.find(
7651                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
7652        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7653                fwk_cacMode);
7654        if (NAME_NOT_FOUND != val) {
7655            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
7656            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
7657                rc = BAD_VALUE;
7658            }
7659        } else {
7660            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
7661        }
7662    }
7663
7664    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
7665        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
7666        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7667                fwk_focusMode);
7668        if (NAME_NOT_FOUND != val) {
7669            uint8_t focusMode = (uint8_t)val;
7670            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
7671                rc = BAD_VALUE;
7672            }
7673        }
7674    }
7675
7676    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
7677        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
7678        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
7679                focalDistance)) {
7680            rc = BAD_VALUE;
7681        }
7682    }
7683
7684    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
7685        uint8_t fwk_antibandingMode =
7686                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
7687        int val = lookupHalName(ANTIBANDING_MODES_MAP,
7688                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
7689        if (NAME_NOT_FOUND != val) {
7690            uint32_t hal_antibandingMode = (uint32_t)val;
7691            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
7692                    hal_antibandingMode)) {
7693                rc = BAD_VALUE;
7694            }
7695        }
7696    }
7697
7698    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
7699        int32_t expCompensation = frame_settings.find(
7700                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
7701        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
7702            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
7703        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7704            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7705        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7706                expCompensation)) {
7707            rc = BAD_VALUE;
7708        }
7709    }
7710
7711    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7712        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7713        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7714            rc = BAD_VALUE;
7715        }
7716    }
7717    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7718        rc = setHalFpsRange(frame_settings, hal_metadata);
7719        if (rc != NO_ERROR) {
7720            ALOGE("%s: setHalFpsRange failed", __func__);
7721        }
7722    }
7723
7724    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7725        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7726        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7727            rc = BAD_VALUE;
7728        }
7729    }
7730
7731    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7732        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7733        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7734                fwk_effectMode);
7735        if (NAME_NOT_FOUND != val) {
7736            uint8_t effectMode = (uint8_t)val;
7737            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
7738                rc = BAD_VALUE;
7739            }
7740        }
7741    }
7742
7743    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7744        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7745        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
7746                colorCorrectMode)) {
7747            rc = BAD_VALUE;
7748        }
7749    }
7750
7751    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7752        cam_color_correct_gains_t colorCorrectGains;
7753        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7754            colorCorrectGains.gains[i] =
7755                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7756        }
7757        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
7758                colorCorrectGains)) {
7759            rc = BAD_VALUE;
7760        }
7761    }
7762
7763    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7764        cam_color_correct_matrix_t colorCorrectTransform;
7765        cam_rational_type_t transform_elem;
7766        size_t num = 0;
7767        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7768           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7769              transform_elem.numerator =
7770                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7771              transform_elem.denominator =
7772                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7773              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7774              num++;
7775           }
7776        }
7777        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7778                colorCorrectTransform)) {
7779            rc = BAD_VALUE;
7780        }
7781    }
7782
7783    cam_trigger_t aecTrigger;
7784    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7785    aecTrigger.trigger_id = -1;
7786    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7787        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7788        aecTrigger.trigger =
7789            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7790        aecTrigger.trigger_id =
7791            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7792        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7793                aecTrigger)) {
7794            rc = BAD_VALUE;
7795        }
7796        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7797                aecTrigger.trigger, aecTrigger.trigger_id);
7798    }
7799
7800    /*af_trigger must come with a trigger id*/
7801    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7802        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7803        cam_trigger_t af_trigger;
7804        af_trigger.trigger =
7805            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7806        af_trigger.trigger_id =
7807            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7808        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7809            rc = BAD_VALUE;
7810        }
7811        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7812                af_trigger.trigger, af_trigger.trigger_id);
7813    }
7814
7815    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7816        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7817        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
7818            rc = BAD_VALUE;
7819        }
7820    }
7821    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7822        cam_edge_application_t edge_application;
7823        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7824        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7825            edge_application.sharpness = 0;
7826        } else {
7827            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7828        }
7829        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7830            rc = BAD_VALUE;
7831        }
7832    }
7833
7834    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7835        int32_t respectFlashMode = 1;
7836        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7837            uint8_t fwk_aeMode =
7838                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7839            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7840                respectFlashMode = 0;
7841                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7842                    __func__);
7843            }
7844        }
7845        if (respectFlashMode) {
7846            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7847                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7848            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7849            // To check: CAM_INTF_META_FLASH_MODE usage
7850            if (NAME_NOT_FOUND != val) {
7851                uint8_t flashMode = (uint8_t)val;
7852                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
7853                    rc = BAD_VALUE;
7854                }
7855            }
7856        }
7857    }
7858
7859    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7860        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7861        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
7862            rc = BAD_VALUE;
7863        }
7864    }
7865
7866    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7867        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
7868        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
7869                flashFiringTime)) {
7870            rc = BAD_VALUE;
7871        }
7872    }
7873
7874    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
7875        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
7876        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
7877                hotPixelMode)) {
7878            rc = BAD_VALUE;
7879        }
7880    }
7881
7882    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
7883        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
7884        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
7885                lensAperture)) {
7886            rc = BAD_VALUE;
7887        }
7888    }
7889
7890    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
7891        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
7892        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
7893                filterDensity)) {
7894            rc = BAD_VALUE;
7895        }
7896    }
7897
7898    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
7899        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
7900        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH, focalLength)) {
7901            rc = BAD_VALUE;
7902        }
7903    }
7904
7905    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
7906        uint8_t optStabMode =
7907                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
7908        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE, optStabMode)) {
7909            rc = BAD_VALUE;
7910        }
7911    }
7912
7913    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
7914        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
7915        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
7916                noiseRedMode)) {
7917            rc = BAD_VALUE;
7918        }
7919    }
7920
7921    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
7922        float reprocessEffectiveExposureFactor =
7923            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
7924        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
7925                reprocessEffectiveExposureFactor)) {
7926            rc = BAD_VALUE;
7927        }
7928    }
7929
7930    cam_crop_region_t scalerCropRegion;
7931    bool scalerCropSet = false;
7932    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
7933        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
7934        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
7935        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
7936        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
7937
7938        // Map coordinate system from active array to sensor output.
7939        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
7940                scalerCropRegion.width, scalerCropRegion.height);
7941
7942        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
7943                scalerCropRegion)) {
7944            rc = BAD_VALUE;
7945        }
7946        scalerCropSet = true;
7947    }
7948
7949    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
7950        int64_t sensorExpTime =
7951                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
7952        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
7953        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
7954                sensorExpTime)) {
7955            rc = BAD_VALUE;
7956        }
7957    }
7958
7959    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
7960        int64_t sensorFrameDuration =
7961                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
7962        int64_t minFrameDuration = getMinFrameDuration(request);
7963        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
7964        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
7965            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
7966        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
7967        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
7968                sensorFrameDuration)) {
7969            rc = BAD_VALUE;
7970        }
7971    }
7972
7973    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
7974        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
7975        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
7976                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
7977        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
7978                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
7979        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
7980        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
7981                sensorSensitivity)) {
7982            rc = BAD_VALUE;
7983        }
7984    }
7985
7986    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
7987        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
7988        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
7989            rc = BAD_VALUE;
7990        }
7991    }
7992
7993    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
7994        uint8_t fwk_facedetectMode =
7995                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
7996
7997        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7998                fwk_facedetectMode);
7999
8000        if (NAME_NOT_FOUND != val) {
8001            uint8_t facedetectMode = (uint8_t)val;
8002            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8003                    facedetectMode)) {
8004                rc = BAD_VALUE;
8005            }
8006        }
8007    }
8008
8009    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8010        uint8_t histogramMode =
8011                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8012        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8013                histogramMode)) {
8014            rc = BAD_VALUE;
8015        }
8016    }
8017
8018    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8019        uint8_t sharpnessMapMode =
8020                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8021        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8022                sharpnessMapMode)) {
8023            rc = BAD_VALUE;
8024        }
8025    }
8026
8027    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8028        uint8_t tonemapMode =
8029                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8030        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8031            rc = BAD_VALUE;
8032        }
8033    }
8034    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8035    /*All tonemap channels will have the same number of points*/
8036    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8037        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8038        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8039        cam_rgb_tonemap_curves tonemapCurves;
8040        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8041        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8042            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8043                    __func__, tonemapCurves.tonemap_points_cnt,
8044                    CAM_MAX_TONEMAP_CURVE_SIZE);
8045            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8046        }
8047
8048        /* ch0 = G*/
8049        size_t point = 0;
8050        cam_tonemap_curve_t tonemapCurveGreen;
8051        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8052            for (size_t j = 0; j < 2; j++) {
8053               tonemapCurveGreen.tonemap_points[i][j] =
8054                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8055               point++;
8056            }
8057        }
8058        tonemapCurves.curves[0] = tonemapCurveGreen;
8059
8060        /* ch 1 = B */
8061        point = 0;
8062        cam_tonemap_curve_t tonemapCurveBlue;
8063        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8064            for (size_t j = 0; j < 2; j++) {
8065               tonemapCurveBlue.tonemap_points[i][j] =
8066                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8067               point++;
8068            }
8069        }
8070        tonemapCurves.curves[1] = tonemapCurveBlue;
8071
8072        /* ch 2 = R */
8073        point = 0;
8074        cam_tonemap_curve_t tonemapCurveRed;
8075        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8076            for (size_t j = 0; j < 2; j++) {
8077               tonemapCurveRed.tonemap_points[i][j] =
8078                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8079               point++;
8080            }
8081        }
8082        tonemapCurves.curves[2] = tonemapCurveRed;
8083
8084        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8085                tonemapCurves)) {
8086            rc = BAD_VALUE;
8087        }
8088    }
8089
8090    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8091        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8092        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8093                captureIntent)) {
8094            rc = BAD_VALUE;
8095        }
8096    }
8097
8098    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8099        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8100        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8101                blackLevelLock)) {
8102            rc = BAD_VALUE;
8103        }
8104    }
8105
8106    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8107        uint8_t lensShadingMapMode =
8108                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8109        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8110                lensShadingMapMode)) {
8111            rc = BAD_VALUE;
8112        }
8113    }
8114
8115    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8116        cam_area_t roi;
8117        bool reset = true;
8118        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8119
8120        // Map coordinate system from active array to sensor output.
8121        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8122                roi.rect.height);
8123
8124        if (scalerCropSet) {
8125            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8126        }
8127        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8128            rc = BAD_VALUE;
8129        }
8130    }
8131
8132    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8133        cam_area_t roi;
8134        bool reset = true;
8135        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8136
8137        // Map coordinate system from active array to sensor output.
8138        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8139                roi.rect.height);
8140
8141        if (scalerCropSet) {
8142            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8143        }
8144        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8145            rc = BAD_VALUE;
8146        }
8147    }
8148
8149    if (m_bIs4KVideo) {
8150        /* Override needed for Video template in case of 4K video */
8151        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8152                CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8153            rc = BAD_VALUE;
8154        }
8155    } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8156            frame_settings.exists(QCAMERA3_CDS_MODE)) {
8157        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8158        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8159            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8160        } else {
8161            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8162                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8163                rc = BAD_VALUE;
8164            }
8165        }
8166    }
8167
8168    // TNR
8169    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8170        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8171        uint8_t b_TnrRequested = 0;
8172        cam_denoise_param_t tnr;
8173        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8174        tnr.process_plates =
8175            (cam_denoise_process_type_t)frame_settings.find(
8176            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8177        b_TnrRequested = tnr.denoise_enable;
8178        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8179            rc = BAD_VALUE;
8180        }
8181    }
8182
8183    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8184        int32_t fwk_testPatternMode =
8185                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8186        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8187                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8188
8189        if (NAME_NOT_FOUND != testPatternMode) {
8190            cam_test_pattern_data_t testPatternData;
8191            memset(&testPatternData, 0, sizeof(testPatternData));
8192            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8193            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8194                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8195                int32_t *fwk_testPatternData =
8196                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8197                testPatternData.r = fwk_testPatternData[0];
8198                testPatternData.b = fwk_testPatternData[3];
8199                switch (gCamCapability[mCameraId]->color_arrangement) {
8200                    case CAM_FILTER_ARRANGEMENT_RGGB:
8201                    case CAM_FILTER_ARRANGEMENT_GRBG:
8202                        testPatternData.gr = fwk_testPatternData[1];
8203                        testPatternData.gb = fwk_testPatternData[2];
8204                        break;
8205                    case CAM_FILTER_ARRANGEMENT_GBRG:
8206                    case CAM_FILTER_ARRANGEMENT_BGGR:
8207                        testPatternData.gr = fwk_testPatternData[2];
8208                        testPatternData.gb = fwk_testPatternData[1];
8209                        break;
8210                    default:
8211                        ALOGE("%s: color arrangement %d is not supported", __func__,
8212                                gCamCapability[mCameraId]->color_arrangement);
8213                        break;
8214                }
8215            }
8216            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8217                    testPatternData)) {
8218                rc = BAD_VALUE;
8219            }
8220        } else {
8221            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8222                    fwk_testPatternMode);
8223        }
8224    }
8225
8226    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8227        size_t count = 0;
8228        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8229        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8230                gps_coords.data.d, gps_coords.count, count);
8231        if (gps_coords.count != count) {
8232            rc = BAD_VALUE;
8233        }
8234    }
8235
8236    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8237        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8238        size_t count = 0;
8239        const char *gps_methods_src = (const char *)
8240                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8241        memset(gps_methods, '\0', sizeof(gps_methods));
8242        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8243        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8244                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8245        if (GPS_PROCESSING_METHOD_SIZE != count) {
8246            rc = BAD_VALUE;
8247        }
8248    }
8249
8250    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8251        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8252        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8253                gps_timestamp)) {
8254            rc = BAD_VALUE;
8255        }
8256    }
8257
8258    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8259        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8260        cam_rotation_info_t rotation_info;
8261        if (orientation == 0) {
8262           rotation_info.rotation = ROTATE_0;
8263        } else if (orientation == 90) {
8264           rotation_info.rotation = ROTATE_90;
8265        } else if (orientation == 180) {
8266           rotation_info.rotation = ROTATE_180;
8267        } else if (orientation == 270) {
8268           rotation_info.rotation = ROTATE_270;
8269        }
8270        rotation_info.streamId = snapshotStreamId;
8271        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8272        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8273            rc = BAD_VALUE;
8274        }
8275    }
8276
8277    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8278        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8279        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8280            rc = BAD_VALUE;
8281        }
8282    }
8283
8284    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8285        uint32_t thumb_quality = (uint32_t)
8286                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8287        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8288                thumb_quality)) {
8289            rc = BAD_VALUE;
8290        }
8291    }
8292
8293    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8294        cam_dimension_t dim;
8295        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8296        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8297        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8298            rc = BAD_VALUE;
8299        }
8300    }
8301
8302    // Internal metadata
8303    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8304        size_t count = 0;
8305        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8306        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8307                privatedata.data.i32, privatedata.count, count);
8308        if (privatedata.count != count) {
8309            rc = BAD_VALUE;
8310        }
8311    }
8312
8313    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8314        uint8_t* use_av_timer =
8315                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8316        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8317            rc = BAD_VALUE;
8318        }
8319    }
8320
8321    // EV step
8322    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8323            gCamCapability[mCameraId]->exp_compensation_step)) {
8324        rc = BAD_VALUE;
8325    }
8326
8327    // CDS info
8328    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8329        cam_cds_data_t *cdsData = (cam_cds_data_t *)
8330                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8331
8332        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8333                CAM_INTF_META_CDS_DATA, *cdsData)) {
8334            rc = BAD_VALUE;
8335        }
8336    }
8337
8338    return rc;
8339}
8340
8341/*===========================================================================
8342 * FUNCTION   : captureResultCb
8343 *
8344 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8345 *
8346 * PARAMETERS :
8347 *   @frame  : frame information from mm-camera-interface
8348 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8349 *   @userdata: userdata
8350 *
8351 * RETURN     : NONE
8352 *==========================================================================*/
8353void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8354                camera3_stream_buffer_t *buffer,
8355                uint32_t frame_number, void *userdata)
8356{
8357    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8358    if (hw == NULL) {
8359        ALOGE("%s: Invalid hw %p", __func__, hw);
8360        return;
8361    }
8362
8363    hw->captureResultCb(metadata, buffer, frame_number);
8364    return;
8365}
8366
8367
8368/*===========================================================================
8369 * FUNCTION   : initialize
8370 *
8371 * DESCRIPTION: Pass framework callback pointers to HAL
8372 *
8373 * PARAMETERS :
8374 *
8375 *
8376 * RETURN     : Success : 0
8377 *              Failure: -ENODEV
8378 *==========================================================================*/
8379
8380int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8381                                  const camera3_callback_ops_t *callback_ops)
8382{
8383    CDBG("%s: E", __func__);
8384    QCamera3HardwareInterface *hw =
8385        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8386    if (!hw) {
8387        ALOGE("%s: NULL camera device", __func__);
8388        return -ENODEV;
8389    }
8390
8391    int rc = hw->initialize(callback_ops);
8392    CDBG("%s: X", __func__);
8393    return rc;
8394}
8395
8396/*===========================================================================
8397 * FUNCTION   : configure_streams
8398 *
8399 * DESCRIPTION:
8400 *
8401 * PARAMETERS :
8402 *
8403 *
8404 * RETURN     : Success: 0
8405 *              Failure: -EINVAL (if stream configuration is invalid)
8406 *                       -ENODEV (fatal error)
8407 *==========================================================================*/
8408
8409int QCamera3HardwareInterface::configure_streams(
8410        const struct camera3_device *device,
8411        camera3_stream_configuration_t *stream_list)
8412{
8413    CDBG("%s: E", __func__);
8414    QCamera3HardwareInterface *hw =
8415        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8416    if (!hw) {
8417        ALOGE("%s: NULL camera device", __func__);
8418        return -ENODEV;
8419    }
8420    int rc = hw->configureStreams(stream_list);
8421    CDBG("%s: X", __func__);
8422    return rc;
8423}
8424
8425/*===========================================================================
8426 * FUNCTION   : construct_default_request_settings
8427 *
8428 * DESCRIPTION: Configure a settings buffer to meet the required use case
8429 *
8430 * PARAMETERS :
8431 *
8432 *
8433 * RETURN     : Success: Return valid metadata
8434 *              Failure: Return NULL
8435 *==========================================================================*/
8436const camera_metadata_t* QCamera3HardwareInterface::
8437    construct_default_request_settings(const struct camera3_device *device,
8438                                        int type)
8439{
8440
8441    CDBG("%s: E", __func__);
8442    camera_metadata_t* fwk_metadata = NULL;
8443    QCamera3HardwareInterface *hw =
8444        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8445    if (!hw) {
8446        ALOGE("%s: NULL camera device", __func__);
8447        return NULL;
8448    }
8449
8450    fwk_metadata = hw->translateCapabilityToMetadata(type);
8451
8452    CDBG("%s: X", __func__);
8453    return fwk_metadata;
8454}
8455
8456/*===========================================================================
8457 * FUNCTION   : process_capture_request
8458 *
8459 * DESCRIPTION:
8460 *
8461 * PARAMETERS :
8462 *
8463 *
8464 * RETURN     :
8465 *==========================================================================*/
8466int QCamera3HardwareInterface::process_capture_request(
8467                    const struct camera3_device *device,
8468                    camera3_capture_request_t *request)
8469{
8470    CDBG("%s: E", __func__);
8471    QCamera3HardwareInterface *hw =
8472        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8473    if (!hw) {
8474        ALOGE("%s: NULL camera device", __func__);
8475        return -EINVAL;
8476    }
8477
8478    int rc = hw->processCaptureRequest(request);
8479    CDBG("%s: X", __func__);
8480    return rc;
8481}
8482
8483/*===========================================================================
8484 * FUNCTION   : dump
8485 *
8486 * DESCRIPTION:
8487 *
8488 * PARAMETERS :
8489 *
8490 *
8491 * RETURN     :
8492 *==========================================================================*/
8493
8494void QCamera3HardwareInterface::dump(
8495                const struct camera3_device *device, int fd)
8496{
8497    /* Log level property is read when "adb shell dumpsys media.camera" is
8498       called so that the log level can be controlled without restarting
8499       the media server */
8500    getLogLevel();
8501
8502    CDBG("%s: E", __func__);
8503    QCamera3HardwareInterface *hw =
8504        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8505    if (!hw) {
8506        ALOGE("%s: NULL camera device", __func__);
8507        return;
8508    }
8509
8510    hw->dump(fd);
8511    CDBG("%s: X", __func__);
8512    return;
8513}
8514
8515/*===========================================================================
8516 * FUNCTION   : flush
8517 *
8518 * DESCRIPTION:
8519 *
8520 * PARAMETERS :
8521 *
8522 *
8523 * RETURN     :
8524 *==========================================================================*/
8525
8526int QCamera3HardwareInterface::flush(
8527                const struct camera3_device *device)
8528{
8529    int rc;
8530    CDBG("%s: E", __func__);
8531    QCamera3HardwareInterface *hw =
8532        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8533    if (!hw) {
8534        ALOGE("%s: NULL camera device", __func__);
8535        return -EINVAL;
8536    }
8537
8538    rc = hw->flush();
8539    CDBG("%s: X", __func__);
8540    return rc;
8541}
8542
8543/*===========================================================================
8544 * FUNCTION   : close_camera_device
8545 *
8546 * DESCRIPTION:
8547 *
8548 * PARAMETERS :
8549 *
8550 *
8551 * RETURN     :
8552 *==========================================================================*/
8553int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8554{
8555    CDBG("%s: E", __func__);
8556    int ret = NO_ERROR;
8557    QCamera3HardwareInterface *hw =
8558        reinterpret_cast<QCamera3HardwareInterface *>(
8559            reinterpret_cast<camera3_device_t *>(device)->priv);
8560    if (!hw) {
8561        ALOGE("NULL camera device");
8562        return BAD_VALUE;
8563    }
8564    delete hw;
8565
8566    CDBG("%s: X", __func__);
8567    return ret;
8568}
8569
8570/*===========================================================================
8571 * FUNCTION   : getWaveletDenoiseProcessPlate
8572 *
8573 * DESCRIPTION: query wavelet denoise process plate
8574 *
8575 * PARAMETERS : None
8576 *
8577 * RETURN     : WNR prcocess plate value
8578 *==========================================================================*/
8579cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8580{
8581    char prop[PROPERTY_VALUE_MAX];
8582    memset(prop, 0, sizeof(prop));
8583    property_get("persist.denoise.process.plates", prop, "0");
8584    int processPlate = atoi(prop);
8585    switch(processPlate) {
8586    case 0:
8587        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8588    case 1:
8589        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8590    case 2:
8591        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8592    case 3:
8593        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8594    default:
8595        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8596    }
8597}
8598
8599
8600/*===========================================================================
8601 * FUNCTION   : getTemporalDenoiseProcessPlate
8602 *
8603 * DESCRIPTION: query temporal denoise process plate
8604 *
8605 * PARAMETERS : None
8606 *
8607 * RETURN     : TNR prcocess plate value
8608 *==========================================================================*/
8609cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
8610{
8611    char prop[PROPERTY_VALUE_MAX];
8612    memset(prop, 0, sizeof(prop));
8613    property_get("persist.tnr.process.plates", prop, "0");
8614    int processPlate = atoi(prop);
8615    switch(processPlate) {
8616    case 0:
8617        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8618    case 1:
8619        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8620    case 2:
8621        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8622    case 3:
8623        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8624    default:
8625        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8626    }
8627}
8628
8629
8630/*===========================================================================
8631 * FUNCTION   : extractSceneMode
8632 *
8633 * DESCRIPTION: Extract scene mode from frameworks set metadata
8634 *
8635 * PARAMETERS :
8636 *      @frame_settings: CameraMetadata reference
8637 *      @metaMode: ANDROID_CONTORL_MODE
8638 *      @hal_metadata: hal metadata structure
8639 *
8640 * RETURN     : None
8641 *==========================================================================*/
8642int32_t QCamera3HardwareInterface::extractSceneMode(
8643        const CameraMetadata &frame_settings, uint8_t metaMode,
8644        metadata_buffer_t *hal_metadata)
8645{
8646    int32_t rc = NO_ERROR;
8647
8648    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
8649        camera_metadata_ro_entry entry =
8650                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
8651        if (0 == entry.count)
8652            return rc;
8653
8654        uint8_t fwk_sceneMode = entry.data.u8[0];
8655
8656        int val = lookupHalName(SCENE_MODES_MAP,
8657                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
8658                fwk_sceneMode);
8659        if (NAME_NOT_FOUND != val) {
8660            uint8_t sceneMode = (uint8_t)val;
8661            CDBG("%s: sceneMode: %d", __func__, sceneMode);
8662            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8663                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8664                rc = BAD_VALUE;
8665            }
8666        }
8667    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
8668            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
8669        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
8670        CDBG("%s: sceneMode: %d", __func__, sceneMode);
8671        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8672                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8673            rc = BAD_VALUE;
8674        }
8675    }
8676    return rc;
8677}
8678
8679/*===========================================================================
8680 * FUNCTION   : needRotationReprocess
8681 *
8682 * DESCRIPTION: if rotation needs to be done by reprocess in pp
8683 *
8684 * PARAMETERS : none
8685 *
8686 * RETURN     : true: needed
8687 *              false: no need
8688 *==========================================================================*/
8689bool QCamera3HardwareInterface::needRotationReprocess()
8690{
8691    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
8692        // current rotation is not zero, and pp has the capability to process rotation
8693        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
8694        return true;
8695    }
8696
8697    return false;
8698}
8699
8700/*===========================================================================
8701 * FUNCTION   : needReprocess
8702 *
8703 * DESCRIPTION: if reprocess in needed
8704 *
8705 * PARAMETERS : none
8706 *
8707 * RETURN     : true: needed
8708 *              false: no need
8709 *==========================================================================*/
8710bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
8711{
8712    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
8713        // TODO: add for ZSL HDR later
8714        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
8715        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
8716            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
8717            return true;
8718        } else {
8719            CDBG_HIGH("%s: already post processed frame", __func__);
8720            return false;
8721        }
8722    }
8723    return needRotationReprocess();
8724}
8725
8726/*===========================================================================
8727 * FUNCTION   : needJpegRotation
8728 *
8729 * DESCRIPTION: if rotation from jpeg is needed
8730 *
8731 * PARAMETERS : none
8732 *
8733 * RETURN     : true: needed
8734 *              false: no need
8735 *==========================================================================*/
8736bool QCamera3HardwareInterface::needJpegRotation()
8737{
8738   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
8739    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
8740       CDBG("%s: Need Jpeg to do the rotation", __func__);
8741       return true;
8742    }
8743    return false;
8744}
8745
8746/*===========================================================================
8747 * FUNCTION   : addOfflineReprocChannel
8748 *
8749 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
8750 *              coming from input channel
8751 *
8752 * PARAMETERS :
8753 *   @config  : reprocess configuration
8754 *   @inputChHandle : pointer to the input (source) channel
8755 *
8756 *
8757 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8758 *==========================================================================*/
8759QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
8760        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
8761{
8762    int32_t rc = NO_ERROR;
8763    QCamera3ReprocessChannel *pChannel = NULL;
8764
8765    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
8766            mChannelHandle, mCameraHandle->ops, NULL, config.padding,
8767            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
8768    if (NULL == pChannel) {
8769        ALOGE("%s: no mem for reprocess channel", __func__);
8770        return NULL;
8771    }
8772
8773    rc = pChannel->initialize(IS_TYPE_NONE);
8774    if (rc != NO_ERROR) {
8775        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
8776        delete pChannel;
8777        return NULL;
8778    }
8779
8780    // pp feature config
8781    cam_pp_feature_config_t pp_config;
8782    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
8783
8784    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
8785
8786    rc = pChannel->addReprocStreamsFromSource(pp_config,
8787            config,
8788            IS_TYPE_NONE,
8789            mMetadataChannel);
8790
8791    if (rc != NO_ERROR) {
8792        delete pChannel;
8793        return NULL;
8794    }
8795    return pChannel;
8796}
8797
8798/*===========================================================================
8799 * FUNCTION   : getMobicatMask
8800 *
8801 * DESCRIPTION: returns mobicat mask
8802 *
8803 * PARAMETERS : none
8804 *
8805 * RETURN     : mobicat mask
8806 *
8807 *==========================================================================*/
8808uint8_t QCamera3HardwareInterface::getMobicatMask()
8809{
8810    return m_MobicatMask;
8811}
8812
8813/*===========================================================================
8814 * FUNCTION   : setMobicat
8815 *
8816 * DESCRIPTION: set Mobicat on/off.
8817 *
8818 * PARAMETERS :
8819 *   @params  : none
8820 *
8821 * RETURN     : int32_t type of status
8822 *              NO_ERROR  -- success
8823 *              none-zero failure code
8824 *==========================================================================*/
8825int32_t QCamera3HardwareInterface::setMobicat()
8826{
8827    char value [PROPERTY_VALUE_MAX];
8828    property_get("persist.camera.mobicat", value, "0");
8829    int32_t ret = NO_ERROR;
8830    uint8_t enableMobi = (uint8_t)atoi(value);
8831
8832    if (enableMobi) {
8833        tune_cmd_t tune_cmd;
8834        tune_cmd.type = SET_RELOAD_CHROMATIX;
8835        tune_cmd.module = MODULE_ALL;
8836        tune_cmd.value = TRUE;
8837        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8838                CAM_INTF_PARM_SET_VFE_COMMAND,
8839                tune_cmd);
8840
8841        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8842                CAM_INTF_PARM_SET_PP_COMMAND,
8843                tune_cmd);
8844    }
8845    m_MobicatMask = enableMobi;
8846
8847    return ret;
8848}
8849
8850/*===========================================================================
8851* FUNCTION   : getLogLevel
8852*
8853* DESCRIPTION: Reads the log level property into a variable
8854*
8855* PARAMETERS :
8856*   None
8857*
8858* RETURN     :
8859*   None
8860*==========================================================================*/
8861void QCamera3HardwareInterface::getLogLevel()
8862{
8863    char prop[PROPERTY_VALUE_MAX];
8864    uint32_t globalLogLevel = 0;
8865
8866    property_get("persist.camera.hal.debug", prop, "0");
8867    int val = atoi(prop);
8868    if (0 <= val) {
8869        gCamHal3LogLevel = (uint32_t)val;
8870    }
8871    property_get("persist.camera.global.debug", prop, "0");
8872    val = atoi(prop);
8873    if (0 <= val) {
8874        globalLogLevel = (uint32_t)val;
8875    }
8876
8877    /* Highest log level among hal.logs and global.logs is selected */
8878    if (gCamHal3LogLevel < globalLogLevel)
8879        gCamHal3LogLevel = globalLogLevel;
8880
8881    return;
8882}
8883
8884/*===========================================================================
8885 * FUNCTION   : validateStreamRotations
8886 *
8887 * DESCRIPTION: Check if the rotations requested are supported
8888 *
8889 * PARAMETERS :
8890 *   @stream_list : streams to be configured
8891 *
8892 * RETURN     : NO_ERROR on success
8893 *              -EINVAL on failure
8894 *
8895 *==========================================================================*/
8896int QCamera3HardwareInterface::validateStreamRotations(
8897        camera3_stream_configuration_t *streamList)
8898{
8899    int rc = NO_ERROR;
8900
8901    /*
8902    * Loop through all streams requested in configuration
8903    * Check if unsupported rotations have been requested on any of them
8904    */
8905    for (size_t j = 0; j < streamList->num_streams; j++){
8906        camera3_stream_t *newStream = streamList->streams[j];
8907
8908        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
8909        bool isImplDef = (newStream->format ==
8910                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
8911        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
8912                isImplDef);
8913
8914        if (isRotated && (!isImplDef || isZsl)) {
8915            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
8916                    "type:%d and stream format:%d", __func__,
8917                    newStream->rotation, newStream->stream_type,
8918                    newStream->format);
8919            rc = -EINVAL;
8920            break;
8921        }
8922    }
8923    return rc;
8924}
8925
8926/*===========================================================================
8927* FUNCTION   : getFlashInfo
8928*
8929* DESCRIPTION: Retrieve information about whether the device has a flash.
8930*
8931* PARAMETERS :
8932*   @cameraId  : Camera id to query
8933*   @hasFlash  : Boolean indicating whether there is a flash device
8934*                associated with given camera
8935*   @flashNode : If a flash device exists, this will be its device node.
8936*
8937* RETURN     :
8938*   None
8939*==========================================================================*/
8940void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
8941        bool& hasFlash,
8942        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
8943{
8944    cam_capability_t* camCapability = gCamCapability[cameraId];
8945    if (NULL == camCapability) {
8946        hasFlash = false;
8947        flashNode[0] = '\0';
8948    } else {
8949        hasFlash = camCapability->flash_available;
8950        strlcpy(flashNode,
8951                (char*)camCapability->flash_dev_name,
8952                QCAMERA_MAX_FILEPATH_LENGTH);
8953    }
8954}
8955
8956/*===========================================================================
8957* FUNCTION   : getEepromVersionInfo
8958*
8959* DESCRIPTION: Retrieve version info of the sensor EEPROM data
8960*
8961* PARAMETERS : None
8962*
8963* RETURN     : string describing EEPROM version
8964*              "\0" if no such info available
8965*==========================================================================*/
8966const char *QCamera3HardwareInterface::getEepromVersionInfo()
8967{
8968    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
8969}
8970
8971/*===========================================================================
8972* FUNCTION   : getLdafCalib
8973*
8974* DESCRIPTION: Retrieve Laser AF calibration data
8975*
8976* PARAMETERS : None
8977*
8978* RETURN     : Two uint32_t describing laser AF calibration data
8979*              NULL if none is available.
8980*==========================================================================*/
8981const uint32_t *QCamera3HardwareInterface::getLdafCalib()
8982{
8983    if (mLdafCalibExist) {
8984        return &mLdafCalib[0];
8985    } else {
8986        return NULL;
8987    }
8988}
8989
8990/*===========================================================================
8991 * FUNCTION   : dynamicUpdateMetaStreamInfo
8992 *
8993 * DESCRIPTION: This function:
8994 *             (1) stops all the channels
8995 *             (2) returns error on pending requests and buffers
8996 *             (3) sends metastream_info in setparams
8997 *             (4) starts all channels
8998 *             This is useful when sensor has to be restarted to apply any
8999 *             settings such as frame rate from a different sensor mode
9000 *
9001 * PARAMETERS : None
9002 *
9003 * RETURN     : NO_ERROR on success
9004 *              Error codes on failure
9005 *
9006 *==========================================================================*/
9007int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
9008{
9009    ATRACE_CALL();
9010    int rc = NO_ERROR;
9011
9012    CDBG("%s: E", __func__);
9013
9014    rc = stopAllChannels();
9015    if (rc < 0) {
9016        ALOGE("%s: stopAllChannels failed", __func__);
9017        return rc;
9018    }
9019
9020    rc = notifyErrorForPendingRequests();
9021    if (rc < 0) {
9022        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
9023        return rc;
9024    }
9025
9026    /* Send meta stream info once again so that ISP can start */
9027    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9028            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
9029    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
9030    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
9031            mParameters);
9032    if (rc < 0) {
9033        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
9034                __func__);
9035    }
9036
9037    rc = startAllChannels();
9038    if (rc < 0) {
9039        ALOGE("%s: startAllChannels failed", __func__);
9040        return rc;
9041    }
9042
9043    CDBG("%s:%d X", __func__, __LINE__);
9044    return rc;
9045}
9046
9047/*===========================================================================
9048 * FUNCTION   : stopAllChannels
9049 *
9050 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9051 *
9052 * PARAMETERS : None
9053 *
9054 * RETURN     : NO_ERROR on success
9055 *              Error codes on failure
9056 *
9057 *==========================================================================*/
9058int32_t QCamera3HardwareInterface::stopAllChannels()
9059{
9060    int32_t rc = NO_ERROR;
9061
9062    // Stop the Streams/Channels
9063    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9064        it != mStreamInfo.end(); it++) {
9065        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9066        channel->stop();
9067        (*it)->status = INVALID;
9068    }
9069
9070    if (mSupportChannel) {
9071        mSupportChannel->stop();
9072    }
9073    if (mAnalysisChannel) {
9074        mAnalysisChannel->stop();
9075    }
9076    if (mRawDumpChannel) {
9077        mRawDumpChannel->stop();
9078    }
9079    if (mMetadataChannel) {
9080        /* If content of mStreamInfo is not 0, there is metadata stream */
9081        mMetadataChannel->stop();
9082    }
9083
9084    CDBG("%s:%d All channels stopped", __func__, __LINE__);
9085    return rc;
9086}
9087
9088/*===========================================================================
9089 * FUNCTION   : startAllChannels
9090 *
9091 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9092 *
9093 * PARAMETERS : None
9094 *
9095 * RETURN     : NO_ERROR on success
9096 *              Error codes on failure
9097 *
9098 *==========================================================================*/
9099int32_t QCamera3HardwareInterface::startAllChannels()
9100{
9101    int32_t rc = NO_ERROR;
9102
9103    CDBG("%s: Start all channels ", __func__);
9104    // Start the Streams/Channels
9105    if (mMetadataChannel) {
9106        /* If content of mStreamInfo is not 0, there is metadata stream */
9107        rc = mMetadataChannel->start();
9108        if (rc < 0) {
9109            ALOGE("%s: META channel start failed", __func__);
9110            return rc;
9111        }
9112    }
9113    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9114        it != mStreamInfo.end(); it++) {
9115        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9116        rc = channel->start();
9117        if (rc < 0) {
9118            ALOGE("%s: channel start failed", __func__);
9119            return rc;
9120        }
9121    }
9122    if (mAnalysisChannel) {
9123        mAnalysisChannel->start();
9124    }
9125    if (mSupportChannel) {
9126        rc = mSupportChannel->start();
9127        if (rc < 0) {
9128            ALOGE("%s: Support channel start failed", __func__);
9129            return rc;
9130        }
9131    }
9132    if (mRawDumpChannel) {
9133        rc = mRawDumpChannel->start();
9134        if (rc < 0) {
9135            ALOGE("%s: RAW dump channel start failed", __func__);
9136            return rc;
9137        }
9138    }
9139
9140    CDBG("%s:%d All channels started", __func__, __LINE__);
9141    return rc;
9142}
9143
9144/*===========================================================================
9145 * FUNCTION   : notifyErrorForPendingRequests
9146 *
9147 * DESCRIPTION: This function sends error for all the pending requests/buffers
9148 *
9149 * PARAMETERS : None
9150 *
9151 * RETURN     : Error codes
9152 *              NO_ERROR on success
9153 *
9154 *==========================================================================*/
9155int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
9156{
9157    int32_t rc = NO_ERROR;
9158    unsigned int frameNum = 0;
9159    camera3_capture_result_t result;
9160    camera3_stream_buffer_t *pStream_Buf = NULL;
9161    FlushMap flushMap;
9162
9163    memset(&result, 0, sizeof(camera3_capture_result_t));
9164
9165    if (mPendingRequestsList.size() > 0) {
9166        pendingRequestIterator i = mPendingRequestsList.begin();
9167        frameNum = i->frame_number;
9168    } else {
9169        /* There might still be pending buffers even though there are
9170         no pending requests. Setting the frameNum to MAX so that
9171         all the buffers with smaller frame numbers are returned */
9172        frameNum = UINT_MAX;
9173    }
9174
9175    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
9176      __func__, frameNum);
9177
9178    // Go through the pending buffers and group them depending
9179    // on frame number
9180    for (List<PendingBufferInfo>::iterator k =
9181            mPendingBuffersMap.mPendingBufferList.begin();
9182            k != mPendingBuffersMap.mPendingBufferList.end();) {
9183
9184        if (k->frame_number < frameNum) {
9185            ssize_t idx = flushMap.indexOfKey(k->frame_number);
9186            if (idx == NAME_NOT_FOUND) {
9187                Vector<PendingBufferInfo> pending;
9188                pending.add(*k);
9189                flushMap.add(k->frame_number, pending);
9190            } else {
9191                Vector<PendingBufferInfo> &pending =
9192                        flushMap.editValueFor(k->frame_number);
9193                pending.add(*k);
9194            }
9195
9196            mPendingBuffersMap.num_buffers--;
9197            k = mPendingBuffersMap.mPendingBufferList.erase(k);
9198        } else {
9199            k++;
9200        }
9201    }
9202
9203    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
9204        uint32_t frame_number = flushMap.keyAt(iFlush);
9205        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
9206
9207        // Send Error notify to frameworks for each buffer for which
9208        // metadata buffer is already sent
9209        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
9210          __func__, frame_number, pending.size());
9211
9212        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
9213        if (NULL == pStream_Buf) {
9214            ALOGE("%s: No memory for pending buffers array", __func__);
9215            return NO_MEMORY;
9216        }
9217        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
9218
9219        for (size_t j = 0; j < pending.size(); j++) {
9220            const PendingBufferInfo &info = pending.itemAt(j);
9221            camera3_notify_msg_t notify_msg;
9222            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
9223            notify_msg.type = CAMERA3_MSG_ERROR;
9224            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
9225            notify_msg.message.error.error_stream = info.stream;
9226            notify_msg.message.error.frame_number = frame_number;
9227            pStream_Buf[j].acquire_fence = -1;
9228            pStream_Buf[j].release_fence = -1;
9229            pStream_Buf[j].buffer = info.buffer;
9230            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
9231            pStream_Buf[j].stream = info.stream;
9232            mCallbackOps->notify(mCallbackOps, &notify_msg);
9233            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
9234                    frame_number, info.stream);
9235        }
9236
9237        result.result = NULL;
9238        result.frame_number = frame_number;
9239        result.num_output_buffers = (uint32_t)pending.size();
9240        result.output_buffers = pStream_Buf;
9241        mCallbackOps->process_capture_result(mCallbackOps, &result);
9242
9243        delete [] pStream_Buf;
9244    }
9245
9246    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);
9247
9248    flushMap.clear();
9249    for (List<PendingBufferInfo>::iterator k =
9250            mPendingBuffersMap.mPendingBufferList.begin();
9251            k != mPendingBuffersMap.mPendingBufferList.end();) {
9252        ssize_t idx = flushMap.indexOfKey(k->frame_number);
9253        if (idx == NAME_NOT_FOUND) {
9254            Vector<PendingBufferInfo> pending;
9255            pending.add(*k);
9256            flushMap.add(k->frame_number, pending);
9257        } else {
9258            Vector<PendingBufferInfo> &pending =
9259                    flushMap.editValueFor(k->frame_number);
9260            pending.add(*k);
9261        }
9262
9263        mPendingBuffersMap.num_buffers--;
9264        k = mPendingBuffersMap.mPendingBufferList.erase(k);
9265    }
9266
9267    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
9268
9269    // Go through the pending requests info and send error request to framework
9270    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
9271        uint32_t frame_number = flushMap.keyAt(iFlush);
9272        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
9273        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
9274              __func__, frame_number);
9275
9276        // Send shutter notify to frameworks
9277        camera3_notify_msg_t notify_msg;
9278        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
9279        notify_msg.type = CAMERA3_MSG_ERROR;
9280        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
9281        notify_msg.message.error.error_stream = NULL;
9282        notify_msg.message.error.frame_number = frame_number;
9283        mCallbackOps->notify(mCallbackOps, &notify_msg);
9284
9285        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
9286        if (NULL == pStream_Buf) {
9287            ALOGE("%s: No memory for pending buffers array", __func__);
9288            return NO_MEMORY;
9289        }
9290        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
9291
9292        for (size_t j = 0; j < pending.size(); j++) {
9293            const PendingBufferInfo &info = pending.itemAt(j);
9294            pStream_Buf[j].acquire_fence = -1;
9295            pStream_Buf[j].release_fence = -1;
9296            pStream_Buf[j].buffer = info.buffer;
9297            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
9298            pStream_Buf[j].stream = info.stream;
9299        }
9300
9301        result.input_buffer = i->input_buffer;
9302        result.num_output_buffers = (uint32_t)pending.size();
9303        result.output_buffers = pStream_Buf;
9304        result.result = NULL;
9305        result.frame_number = frame_number;
9306        mCallbackOps->process_capture_result(mCallbackOps, &result);
9307        delete [] pStream_Buf;
9308        i = erasePendingRequest(i);
9309    }
9310
9311    /* Reset pending frame Drop list and requests list */
9312    mPendingFrameDropList.clear();
9313
9314    flushMap.clear();
9315    mPendingBuffersMap.num_buffers = 0;
9316    mPendingBuffersMap.mPendingBufferList.clear();
9317    mPendingReprocessResultList.clear();
9318    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);
9319
9320    return rc;
9321}
9322
9323bool QCamera3HardwareInterface::isOnEncoder(
9324        const cam_dimension_t max_viewfinder_size,
9325        uint32_t width, uint32_t height)
9326{
9327    return (width > (uint32_t)max_viewfinder_size.width ||
9328            height > (uint32_t)max_viewfinder_size.height);
9329}
9330
9331/*===========================================================================
9332 * FUNCTION   : setBundleInfo
9333 *
9334 * DESCRIPTION: Set bundle info for all streams that are bundle.
9335 *
9336 * PARAMETERS : None
9337 *
9338 * RETURN     : NO_ERROR on success
9339 *              Error codes on failure
9340 *==========================================================================*/
9341int32_t QCamera3HardwareInterface::setBundleInfo()
9342{
9343    int32_t rc = NO_ERROR;
9344
9345    if (mChannelHandle) {
9346        cam_bundle_config_t bundleInfo;
9347        memset(&bundleInfo, 0, sizeof(bundleInfo));
9348        rc = mCameraHandle->ops->get_bundle_info(
9349                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9350        if (rc != NO_ERROR) {
9351            ALOGE("%s: get_bundle_info failed", __func__);
9352            return rc;
9353        }
9354        if (mAnalysisChannel) {
9355            mAnalysisChannel->setBundleInfo(bundleInfo);
9356        }
9357        if (mSupportChannel) {
9358            mSupportChannel->setBundleInfo(bundleInfo);
9359        }
9360        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9361                it != mStreamInfo.end(); it++) {
9362            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9363            channel->setBundleInfo(bundleInfo);
9364        }
9365        if (mRawDumpChannel) {
9366            mRawDumpChannel->setBundleInfo(bundleInfo);
9367        }
9368    }
9369
9370    return rc;
9371}
9372
9373}; //end namespace qcamera
9374