QCamera3HWI.cpp revision 32939b2560b1ff8a5bbfd09e13b1014e8b3a556f
/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <sync/sync.h>
46#include <gralloc_priv.h>
47#include "util/QCameraFlash.h"
48#include "QCamera3HWI.h"
49#include "QCamera3Mem.h"
50#include "QCamera3Channel.h"
51#include "QCamera3PostProc.h"
52#include "QCamera3VendorTags.h"
53
54using namespace android;
55
56namespace qcamera {
57
// Convenience accessor: buffer pointer at INDEX inside a QCamera3 memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline timing/latency constants (consumed by request handling elsewhere
// in this file).
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum sample values for the supported pixel bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions, used to classify 4K recording sessions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for which EIS (electronic image stabilization)
// is supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-type stream count limits used when validating a stream configuration.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 values per metering-region tuple in framework metadata.
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds

// Element count of a statically sized map table (use only on true arrays).
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features the HAL3 pipeline may request from
// the backend for processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )

// Sentinel for waits that should block indefinitely.
#define TIMEOUT_NEVER -1

// Per-sensor capability and static metadata caches, shared by all HAL
// instances in this process.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-global lock used by the static HAL entry points (uses are outside
// this chunk).
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
// Runtime-adjustable HAL log verbosity; refreshed via getLogLevel().
volatile uint32_t gCamHal3LogLevel = 1;
102
// Maps CDS property strings ("On"/"Off"/"Auto") to backend cam_cds_mode values.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
108
// Framework effect mode <-> backend effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
122
// Framework AWB mode <-> backend white balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
136
// Framework scene mode <-> backend scene mode translation table.
// Note STEADYPHOTO is deliberately mapped to the backend ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
157
// Framework AF mode <-> backend focus mode translation table.
// AF_MODE_OFF intentionally appears twice: both the backend OFF and FIXED
// focus modes are reported to the framework as AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
169
// Framework chromatic aberration correction mode <-> backend mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
180
// Framework AE antibanding mode <-> backend antibanding mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
189
// Framework AE mode -> backend flash mode table. AE_MODE_OFF and AE_MODE_ON
// both disable flash; the AUTO_FLASH variants (plain and REDEYE) both map to
// the backend AUTO flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
199
// Framework flash mode <-> backend flash mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
207
// Framework face detect mode <-> backend face detect mode table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
215
// Framework focus-distance calibration quality <-> backend calibration table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
226
// Framework lens state <-> backend AF lens state translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
233
// JPEG thumbnail sizes advertised to the framework, as flat (width, height)
// pairs. The leading (0, 0) entry is the framework-defined "no thumbnail"
// option required by ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};
241
// Framework sensor test pattern mode <-> backend test pattern table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
251
/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
// Framework reference illuminant <-> backend AWB illuminant table.
// ORDER-SENSITIVE: several backend illuminants appear multiple times (e.g.
// CAM_AWB_D50, CAM_AWB_A); reverse (HAL->Android) lookup returns the first
// match, so do not reorder entries without reviewing the lookup logic.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
277
// High-frame-rate FPS value <-> backend HFR mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
289
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
// register_stream_buffers and get_metadata_vendor_tag_ops are left NULL:
// they are not used with CAMERA_DEVICE_API_VERSION_3_3 (see constructor).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
301
302/*===========================================================================
303 * FUNCTION   : QCamera3HardwareInterface
304 *
305 * DESCRIPTION: constructor of QCamera3HardwareInterface
306 *
307 * PARAMETERS :
308 *   @cameraId  : camera ID
309 *
310 * RETURN     : none
311 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mPrevUrgentFrameNumber(0),
      mPrevFrameNumber(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Populate the camera3_device_t handed back to the framework via
    // openCamera(); priv lets static entry points recover this instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // Mark this sensor's shared capability entry as belonging to HAL v3.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;
    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with all slots empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview/video, both
    // defaulting to enabled unless overridden by setprop.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);
}
394
395/*===========================================================================
396 * FUNCTION   : ~QCamera3HardwareInterface
397 *
398 * DESCRIPTION: destructor of QCamera3HardwareInterface
399 *
400 * PARAMETERS : none
401 *
402 * RETURN     : none
403 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    // First pass: stop every framework-configured stream channel.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    }

    // Second pass: everything is stopped, so channels can be deleted safely.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // Not deleted here — presumably owned via mStreamInfo above; only the
    // alias is cleared. TODO(review): confirm ownership.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
        mChannelHandle = 0;
    }

    if (mCameraOpened)
        closeCamera();

    // Drain all bookkeeping lists; erasePendingRequest also frees any
    // per-request allocations (input buffer, cloned settings).
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    CDBG("%s: X", __func__);
}
520
521/*===========================================================================
522 * FUNCTION   : erasePendingRequest
523 *
524 * DESCRIPTION: function to erase a desired pending request after freeing any
525 *              allocated memory
526 *
527 * PARAMETERS :
528 *   @i       : iterator pointing to pending request to be erased
529 *
530 * RETURN     : iterator pointing to the next request
531 *==========================================================================*/
532QCamera3HardwareInterface::pendingRequestIterator
533        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
534{
535    if (i->input_buffer != NULL) {
536        free(i->input_buffer);
537        i->input_buffer = NULL;
538    }
539    if (i->settings != NULL)
540        free_camera_metadata((camera_metadata_t*)i->settings);
541    return mPendingRequestsList.erase(i);
542}
543
544/*===========================================================================
545 * FUNCTION   : camEvtHandle
546 *
547 * DESCRIPTION: Function registered to mm-camera-interface to handle events
548 *
549 * PARAMETERS :
550 *   @camera_handle : interface layer camera handle
551 *   @evt           : ptr to event
552 *   @user_data     : user data ptr
553 *
554 * RETURN     : none
555 *==========================================================================*/
// Event callback registered with mm-camera-interface (see openCamera()).
// Handles backend daemon death by closing the backend session and reporting
// a fatal CAMERA3_MSG_ERROR_DEVICE to the framework, and daemon pull
// requests by waking the request thread.
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
                                          mm_camera_event_t *evt,
                                          void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                ALOGE("%s: Fatal, camera daemon died", __func__);
                //close the camera backend
                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
                        && obj->mCameraHandle->ops) {
                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
                } else {
                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
                            __func__);
                }
                // Tell the framework the device is unusable (frame_number 0,
                // no specific stream).
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = 0;
                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                CDBG("%s: HAL got request pull from Daemon", __func__);
                // Wake any request thread blocked waiting for the daemon.
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
                        evt->server_event_type);
                break;
        }
    } else {
        ALOGE("%s: NULL user_data/evt", __func__);
    }
}
599
600/*===========================================================================
601 * FUNCTION   : openCamera
602 *
603 * DESCRIPTION: open camera
604 *
605 * PARAMETERS :
606 *   @hw_device  : double ptr for camera device struct
607 *
608 * RETURN     : int32_t type of status
609 *              NO_ERROR  -- success
610 *              none-zero failure code
611 *==========================================================================*/
612int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
613{
614    int rc = 0;
615    if (mCameraOpened) {
616        *hw_device = NULL;
617        return PERMISSION_DENIED;
618    }
619    m_perfLock.lock_acq();
620    rc = openCamera();
621    if (rc == 0) {
622        *hw_device = &mCameraDevice.common;
623    } else
624        *hw_device = NULL;
625
626    m_perfLock.lock_rel();
627    return rc;
628}
629
630/*===========================================================================
631 * FUNCTION   : openCamera
632 *
633 * DESCRIPTION: open camera
634 *
635 * PARAMETERS : none
636 *
637 * RETURN     : int32_t type of status
638 *              NO_ERROR  -- success
639 *              none-zero failure code
640 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;

    ATRACE_CALL();
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    // Reserve the flash unit for this camera before opening the backend so
    // the torch cannot be held by another client during the session.
    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        ALOGE("%s: Failed to reserve flash for camera id: %d",
                __func__,
                mCameraId);
        return UNKNOWN_ERROR;
    }

    // Open the backend session; populates mCameraHandle on success.
    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    mCameraOpened = true;

    // Register camEvtHandle for backend events (daemon death, pull requests).
    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        ALOGE("%s: Error, failed to register event callback", __func__);
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }
    mFirstConfiguration = true;
    return NO_ERROR;
}
678
679/*===========================================================================
680 * FUNCTION   : closeCamera
681 *
682 * DESCRIPTION: close camera
683 *
684 * PARAMETERS : none
685 *
686 * RETURN     : int32_t type of status
687 *              NO_ERROR  -- success
688 *              none-zero failure code
689 *==========================================================================*/
690int QCamera3HardwareInterface::closeCamera()
691{
692    ATRACE_CALL();
693    int rc = NO_ERROR;
694
695    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
696    mCameraHandle = NULL;
697    mCameraOpened = false;
698
699    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
700        CDBG("%s: Failed to release flash for camera id: %d",
701                __func__,
702                mCameraId);
703    }
704
705    return rc;
706}
707
708/*===========================================================================
709 * FUNCTION   : initialize
710 *
711 * DESCRIPTION: Initialize frameworks callback functions
712 *
713 * PARAMETERS :
714 *   @callback_ops : callback function to frameworks
715 *
716 * RETURN     :
717 *
718 *==========================================================================*/
719int QCamera3HardwareInterface::initialize(
720        const struct camera3_callback_ops *callback_ops)
721{
722    ATRACE_CALL();
723    int rc;
724
725    pthread_mutex_lock(&mMutex);
726
727    rc = initParameters();
728    if (rc < 0) {
729        ALOGE("%s: initParamters failed %d", __func__, rc);
730       goto err1;
731    }
732    mCallbackOps = callback_ops;
733
734    mChannelHandle = mCameraHandle->ops->add_channel(
735            mCameraHandle->camera_handle, NULL, NULL, this);
736    if (mChannelHandle == 0) {
737        ALOGE("%s: add_channel failed", __func__);
738        rc = -ENOMEM;
739        pthread_mutex_unlock(&mMutex);
740        return rc;
741    }
742
743    pthread_mutex_unlock(&mMutex);
744    mCameraInitialized = true;
745    return 0;
746
747err1:
748    pthread_mutex_unlock(&mMutex);
749    return rc;
750}
751
752/*===========================================================================
753 * FUNCTION   : validateStreamDimensions
754 *
755 * DESCRIPTION: Check if the configuration requested are those advertised
756 *
757 * PARAMETERS :
758 *   @stream_list : streams to be configured
759 *
760 * RETURN     :
761 *
762 *==========================================================================*/
763int QCamera3HardwareInterface::validateStreamDimensions(
764        camera3_stream_configuration_t *streamList)
765{
766    int rc = NO_ERROR;
767    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
768    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
769    size_t count = 0;
770
771    camera3_stream_t *inputStream = NULL;
772    /*
773    * Loop through all streams to find input stream if it exists*
774    */
775    for (size_t i = 0; i< streamList->num_streams; i++) {
776        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
777            if (inputStream != NULL) {
778                ALOGE("%s: Error, Multiple input streams requested");
779                return -EINVAL;
780            }
781            inputStream = streamList->streams[i];
782        }
783    }
784    /*
785    * Loop through all streams requested in configuration
786    * Check if unsupported sizes have been requested on any of them
787    */
788    for (size_t j = 0; j < streamList->num_streams; j++) {
789        bool sizeFound = false;
790        size_t jpeg_sizes_cnt = 0;
791        camera3_stream_t *newStream = streamList->streams[j];
792
793        uint32_t rotatedHeight = newStream->height;
794        uint32_t rotatedWidth = newStream->width;
795        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
796                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
797            rotatedHeight = newStream->width;
798            rotatedWidth = newStream->height;
799        }
800
801        /*
802        * Sizes are different for each type of stream format check against
803        * appropriate table.
804        */
805        switch (newStream->format) {
806        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
807        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
808        case HAL_PIXEL_FORMAT_RAW10:
809            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
810            for (size_t i = 0; i < count; i++) {
811                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
812                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
813                    sizeFound = true;
814                    break;
815                }
816            }
817            break;
818        case HAL_PIXEL_FORMAT_BLOB:
819            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
820            /* Generate JPEG sizes table */
821            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
822                    count,
823                    MAX_SIZES_CNT,
824                    available_processed_sizes);
825            jpeg_sizes_cnt = filterJpegSizes(
826                    available_jpeg_sizes,
827                    available_processed_sizes,
828                    count * 2,
829                    MAX_SIZES_CNT * 2,
830                    gCamCapability[mCameraId]->active_array_size,
831                    gCamCapability[mCameraId]->max_downscale_factor);
832
833            /* Verify set size against generated sizes table */
834            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
835                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
836                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
837                    sizeFound = true;
838                    break;
839                }
840            }
841            break;
842        case HAL_PIXEL_FORMAT_YCbCr_420_888:
843        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
844        default:
845            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
846                    || newStream->stream_type == CAMERA3_STREAM_INPUT
847                    || IS_USAGE_ZSL(newStream->usage)) {
848                if (((int32_t)rotatedWidth ==
849                                gCamCapability[mCameraId]->active_array_size.width) &&
850                                ((int32_t)rotatedHeight ==
851                                gCamCapability[mCameraId]->active_array_size.height)) {
852                    sizeFound = true;
853                    break;
854                }
855                /* We could potentially break here to enforce ZSL stream
856                 * set from frameworks always is full active array size
857                 * but it is not clear from the spc if framework will always
858                 * follow that, also we have logic to override to full array
859                 * size, so keeping the logic lenient at the moment
860                 */
861            }
862            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
863                    MAX_SIZES_CNT);
864            for (size_t i = 0; i < count; i++) {
865                if (((int32_t)rotatedWidth ==
866                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
867                            ((int32_t)rotatedHeight ==
868                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
869                    sizeFound = true;
870                    break;
871                }
872            }
873            break;
874        } /* End of switch(newStream->format) */
875
876        /* We error out even if a single stream has unsupported size set */
877        if (!sizeFound) {
878            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
879                  "type:%d", __func__, rotatedWidth, rotatedHeight,
880                  newStream->format);
881            ALOGE("%s: Active array size is  %d x %d", __func__,
882                    gCamCapability[mCameraId]->active_array_size.width,
883                    gCamCapability[mCameraId]->active_array_size.height);
884            rc = -EINVAL;
885            break;
886        }
887    } /* End of for each stream */
888    return rc;
889}
890
891/*==============================================================================
892 * FUNCTION   : isSupportChannelNeeded
893 *
894 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
895 *
896 * PARAMETERS :
897 *   @stream_list : streams to be configured
898 *   @stream_config_info : the config info for streams to be configured
899 *
900 * RETURN     : Boolen true/false decision
901 *
902 *==========================================================================*/
903bool QCamera3HardwareInterface::isSupportChannelNeeded(
904        camera3_stream_configuration_t *streamList,
905        cam_stream_size_info_t stream_config_info)
906{
907    uint32_t i;
908    bool pprocRequested = false;
909    /* Check for conditions where PProc pipeline does not have any streams*/
910    for (i = 0; i < stream_config_info.num_streams; i++) {
911        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
912                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
913            pprocRequested = true;
914            break;
915        }
916    }
917
918    if (pprocRequested == false )
919        return true;
920
921    /* Dummy stream needed if only raw or jpeg streams present */
922    for (i = 0; i < streamList->num_streams; i++) {
923        switch(streamList->streams[i]->format) {
924            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
925            case HAL_PIXEL_FORMAT_RAW10:
926            case HAL_PIXEL_FORMAT_RAW16:
927            case HAL_PIXEL_FORMAT_BLOB:
928                break;
929            default:
930                return false;
931        }
932    }
933    return true;
934}
935
936/*==============================================================================
937 * FUNCTION   : getSensorOutputSize
938 *
939 * DESCRIPTION: Get sensor output size based on current stream configuratoin
940 *
941 * PARAMETERS :
942 *   @sensor_dim : sensor output dimension (output)
943 *
944 * RETURN     : int32_t type of status
945 *              NO_ERROR  -- success
946 *              none-zero failure code
947 *
948 *==========================================================================*/
949int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
950{
951    int32_t rc = NO_ERROR;
952
953    cam_dimension_t max_dim = {0, 0};
954    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
955        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
956            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
957        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
958            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
959    }
960
961    clear_metadata_buffer(mParameters);
962
963    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
964            max_dim);
965    if (rc != NO_ERROR) {
966        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
967        return rc;
968    }
969
970    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
971    if (rc != NO_ERROR) {
972        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
973        return rc;
974    }
975
976    clear_metadata_buffer(mParameters);
977    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
978
979    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
980            mParameters);
981    if (rc != NO_ERROR) {
982        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
983        return rc;
984    }
985
986    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
987    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
988
989    return rc;
990}
991
992/*==============================================================================
993 * FUNCTION   : enablePowerHint
994 *
995 * DESCRIPTION: enable single powerhint for preview and different video modes.
996 *
997 * PARAMETERS :
998 *
999 * RETURN     : NULL
1000 *
1001 *==========================================================================*/
1002void QCamera3HardwareInterface::enablePowerHint()
1003{
1004    if (!mPowerHintEnabled) {
1005        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1006        mPowerHintEnabled = true;
1007    }
1008}
1009
1010/*==============================================================================
1011 * FUNCTION   : disablePowerHint
1012 *
1013 * DESCRIPTION: disable current powerhint.
1014 *
1015 * PARAMETERS :
1016 *
1017 * RETURN     : NULL
1018 *
1019 *==========================================================================*/
1020void QCamera3HardwareInterface::disablePowerHint()
1021{
1022    if (mPowerHintEnabled) {
1023        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1024        mPowerHintEnabled = false;
1025    }
1026}
1027
1028/*===========================================================================
1029 * FUNCTION   : configureStreams
1030 *
1031 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1032 *              and output streams.
1033 *
1034 * PARAMETERS :
1035 *   @stream_list : streams to be configured
1036 *
1037 * RETURN     :
1038 *
1039 *==========================================================================*/
1040int QCamera3HardwareInterface::configureStreams(
1041        camera3_stream_configuration_t *streamList)
1042{
1043    ATRACE_CALL();
1044    int rc = 0;
1045
1046    // Acquire perfLock before configure streams
1047    m_perfLock.lock_acq();
1048    rc = configureStreamsPerfLocked(streamList);
1049    m_perfLock.lock_rel();
1050
1051    return rc;
1052}
1053
1054/*===========================================================================
1055 * FUNCTION   : configureStreamsPerfLocked
1056 *
1057 * DESCRIPTION: configureStreams while perfLock is held.
1058 *
1059 * PARAMETERS :
1060 *   @stream_list : streams to be configured
1061 *
1062 * RETURN     : int32_t type of status
1063 *              NO_ERROR  -- success
1064 *              none-zero failure code
1065 *==========================================================================*/
1066int QCamera3HardwareInterface::configureStreamsPerfLocked(
1067        camera3_stream_configuration_t *streamList)
1068{
1069    ATRACE_CALL();
1070    int rc = 0;
1071
1072    // Sanity check stream_list
1073    if (streamList == NULL) {
1074        ALOGE("%s: NULL stream configuration", __func__);
1075        return BAD_VALUE;
1076    }
1077    if (streamList->streams == NULL) {
1078        ALOGE("%s: NULL stream list", __func__);
1079        return BAD_VALUE;
1080    }
1081
1082    if (streamList->num_streams < 1) {
1083        ALOGE("%s: Bad number of streams requested: %d", __func__,
1084                streamList->num_streams);
1085        return BAD_VALUE;
1086    }
1087
1088    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1089        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1090                MAX_NUM_STREAMS, streamList->num_streams);
1091        return BAD_VALUE;
1092    }
1093
1094    mOpMode = streamList->operation_mode;
1095    CDBG("%s: mOpMode: %d", __func__, mOpMode);
1096
1097    /* first invalidate all the steams in the mStreamList
1098     * if they appear again, they will be validated */
1099    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1100            it != mStreamInfo.end(); it++) {
1101        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1102        channel->stop();
1103        (*it)->status = INVALID;
1104    }
1105
1106    if (mRawDumpChannel) {
1107        mRawDumpChannel->stop();
1108        delete mRawDumpChannel;
1109        mRawDumpChannel = NULL;
1110    }
1111
1112    if (mSupportChannel)
1113        mSupportChannel->stop();
1114
1115    if (mAnalysisChannel) {
1116        mAnalysisChannel->stop();
1117    }
1118    if (mMetadataChannel) {
1119        /* If content of mStreamInfo is not 0, there is metadata stream */
1120        mMetadataChannel->stop();
1121    }
1122    if (mChannelHandle) {
1123        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1124                mChannelHandle);
1125        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1126    }
1127
1128    pthread_mutex_lock(&mMutex);
1129
1130    /* Check whether we have video stream */
1131    m_bIs4KVideo = false;
1132    m_bIsVideo = false;
1133    m_bEisSupportedSize = false;
1134    m_bTnrEnabled = false;
1135    bool isZsl = false;
1136    uint32_t videoWidth = 0U;
1137    uint32_t videoHeight = 0U;
1138    size_t rawStreamCnt = 0;
1139    size_t stallStreamCnt = 0;
1140    size_t processedStreamCnt = 0;
1141    // Number of streams on ISP encoder path
1142    size_t numStreamsOnEncoder = 0;
1143    size_t numYuv888OnEncoder = 0;
1144    bool bYuv888OverrideJpeg = false;
1145    cam_dimension_t largeYuv888Size = {0, 0};
1146    cam_dimension_t maxViewfinderSize = {0, 0};
1147    bool bJpegExceeds4K = false;
1148    bool bUseCommonFeatureMask = false;
1149    uint32_t commonFeatureMask = 0;
1150    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1151    camera3_stream_t *inputStream = NULL;
1152    bool isJpeg = false;
1153    cam_dimension_t jpegSize = {0, 0};
1154
1155    /*EIS configuration*/
1156    bool eisSupported = false;
1157    bool oisSupported = false;
1158    int32_t margin_index = -1;
1159    uint8_t eis_prop_set;
1160    uint32_t maxEisWidth = 0;
1161    uint32_t maxEisHeight = 0;
1162    int32_t hal_version = CAM_HAL_V3;
1163
1164    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1165
1166    size_t count = IS_TYPE_MAX;
1167    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1168    for (size_t i = 0; i < count; i++) {
1169        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1170            eisSupported = true;
1171            margin_index = (int32_t)i;
1172            break;
1173        }
1174    }
1175
1176    count = CAM_OPT_STAB_MAX;
1177    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1178    for (size_t i = 0; i < count; i++) {
1179        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1180            oisSupported = true;
1181            break;
1182        }
1183    }
1184
1185    if (eisSupported) {
1186        maxEisWidth = MAX_EIS_WIDTH;
1187        maxEisHeight = MAX_EIS_HEIGHT;
1188    }
1189
1190    /* EIS setprop control */
1191    char eis_prop[PROPERTY_VALUE_MAX];
1192    memset(eis_prop, 0, sizeof(eis_prop));
1193    property_get("persist.camera.eis.enable", eis_prop, "0");
1194    eis_prop_set = (uint8_t)atoi(eis_prop);
1195
1196    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1197            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1198
1199    /* stream configurations */
1200    for (size_t i = 0; i < streamList->num_streams; i++) {
1201        camera3_stream_t *newStream = streamList->streams[i];
1202        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1203                "height = %d, rotation = %d, usage = 0x%x",
1204                __func__, i, newStream->stream_type, newStream->format,
1205                newStream->width, newStream->height, newStream->rotation,
1206                newStream->usage);
1207        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1208                newStream->stream_type == CAMERA3_STREAM_INPUT){
1209            isZsl = true;
1210        }
1211        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1212            inputStream = newStream;
1213        }
1214
1215        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1216            isJpeg = true;
1217            jpegSize.width = newStream->width;
1218            jpegSize.height = newStream->height;
1219            if (newStream->width > VIDEO_4K_WIDTH ||
1220                    newStream->height > VIDEO_4K_HEIGHT)
1221                bJpegExceeds4K = true;
1222        }
1223
1224        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1225                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1226            m_bIsVideo = true;
1227            videoWidth = newStream->width;
1228            videoHeight = newStream->height;
1229            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1230                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1231                m_bIs4KVideo = true;
1232            }
1233            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1234                                  (newStream->height <= maxEisHeight);
1235        }
1236        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1237                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1238            switch (newStream->format) {
1239            case HAL_PIXEL_FORMAT_BLOB:
1240                stallStreamCnt++;
1241                if (isOnEncoder(maxViewfinderSize, newStream->width,
1242                        newStream->height)) {
1243                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1244                    numStreamsOnEncoder++;
1245                }
1246                break;
1247            case HAL_PIXEL_FORMAT_RAW10:
1248            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1249            case HAL_PIXEL_FORMAT_RAW16:
1250                rawStreamCnt++;
1251                break;
1252            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1253                processedStreamCnt++;
1254                if (isOnEncoder(maxViewfinderSize, newStream->width,
1255                        newStream->height)) {
1256                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1257                            IS_USAGE_ZSL(newStream->usage)) {
1258                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1259                    } else {
1260                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1261                    }
1262                    numStreamsOnEncoder++;
1263                }
1264                break;
1265            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1266                processedStreamCnt++;
1267                if (isOnEncoder(maxViewfinderSize, newStream->width,
1268                        newStream->height)) {
1269                    // If Yuv888 size is not greater than 4K, set feature mask
1270                    // to SUPERSET so that it support concurrent request on
1271                    // YUV and JPEG.
1272                    if (newStream->width <= VIDEO_4K_WIDTH &&
1273                            newStream->height <= VIDEO_4K_HEIGHT) {
1274                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1275                    } else {
1276                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1277                    }
1278                    numStreamsOnEncoder++;
1279                    numYuv888OnEncoder++;
1280                    largeYuv888Size.width = newStream->width;
1281                    largeYuv888Size.height = newStream->height;
1282                }
1283                break;
1284            default:
1285                processedStreamCnt++;
1286                if (isOnEncoder(maxViewfinderSize, newStream->width,
1287                        newStream->height)) {
1288                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1289                    numStreamsOnEncoder++;
1290                }
1291                break;
1292            }
1293
1294        }
1295    }
1296
1297    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1298        !m_bIsVideo) {
1299        m_bEisEnable = false;
1300    }
1301
1302    /* Logic to enable/disable TNR based on specific config size/etc.*/
1303    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1304            ((videoWidth == 1920 && videoHeight == 1080) ||
1305            (videoWidth == 1280 && videoHeight == 720)) &&
1306            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1307        m_bTnrEnabled = true;
1308
1309    /* Check if num_streams is sane */
1310    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1311            rawStreamCnt > MAX_RAW_STREAMS ||
1312            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1313        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1314                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1315        pthread_mutex_unlock(&mMutex);
1316        return -EINVAL;
1317    }
1318    /* Check whether we have zsl stream or 4k video case */
1319    if (isZsl && m_bIsVideo) {
1320        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1321        pthread_mutex_unlock(&mMutex);
1322        return -EINVAL;
1323    }
1324    /* Check if stream sizes are sane */
1325    if (numStreamsOnEncoder > 2) {
1326        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1327                __func__);
1328        pthread_mutex_unlock(&mMutex);
1329        return -EINVAL;
1330    } else if (1 < numStreamsOnEncoder){
1331        bUseCommonFeatureMask = true;
1332        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1333                __func__);
1334    }
1335
1336    /* Check if BLOB size is greater than 4k in 4k recording case */
1337    if (m_bIs4KVideo && bJpegExceeds4K) {
1338        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1339                __func__);
1340        pthread_mutex_unlock(&mMutex);
1341        return -EINVAL;
1342    }
1343
1344    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1345    // the YUV stream's size is greater or equal to the JPEG size, set common
1346    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1347    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1348            jpegSize.width, jpegSize.height) &&
1349            largeYuv888Size.width > jpegSize.width &&
1350            largeYuv888Size.height > jpegSize.height) {
1351        bYuv888OverrideJpeg = true;
1352    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1353        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1354    }
1355
1356    rc = validateStreamDimensions(streamList);
1357    if (rc == NO_ERROR) {
1358        rc = validateStreamRotations(streamList);
1359    }
1360    if (rc != NO_ERROR) {
1361        ALOGE("%s: Invalid stream configuration requested!", __func__);
1362        pthread_mutex_unlock(&mMutex);
1363        return rc;
1364    }
1365
1366    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1367    camera3_stream_t *jpegStream = NULL;
1368    for (size_t i = 0; i < streamList->num_streams; i++) {
1369        camera3_stream_t *newStream = streamList->streams[i];
1370        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1371                "stream size : %d x %d, stream rotation = %d",
1372                __func__, newStream->stream_type, newStream->format,
1373                newStream->width, newStream->height, newStream->rotation);
1374        //if the stream is in the mStreamList validate it
1375        bool stream_exists = false;
1376        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1377                it != mStreamInfo.end(); it++) {
1378            if ((*it)->stream == newStream) {
1379                QCamera3ProcessingChannel *channel =
1380                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1381                stream_exists = true;
1382                if (channel)
1383                    delete channel;
1384                (*it)->status = VALID;
1385                (*it)->stream->priv = NULL;
1386                (*it)->channel = NULL;
1387            }
1388        }
1389        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1390            //new stream
1391            stream_info_t* stream_info;
1392            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1393            if (!stream_info) {
1394               ALOGE("%s: Could not allocate stream info", __func__);
1395               rc = -ENOMEM;
1396               pthread_mutex_unlock(&mMutex);
1397               return rc;
1398            }
1399            stream_info->stream = newStream;
1400            stream_info->status = VALID;
1401            stream_info->channel = NULL;
1402            mStreamInfo.push_back(stream_info);
1403        }
1404        /* Covers Opaque ZSL and API1 F/W ZSL */
1405        if (IS_USAGE_ZSL(newStream->usage)
1406                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1407            if (zslStream != NULL) {
1408                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1409                pthread_mutex_unlock(&mMutex);
1410                return BAD_VALUE;
1411            }
1412            zslStream = newStream;
1413        }
1414        /* Covers YUV reprocess */
1415        if (inputStream != NULL) {
1416            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1417                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1418                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1419                    && inputStream->width == newStream->width
1420                    && inputStream->height == newStream->height) {
1421                if (zslStream != NULL) {
1422                    /* This scenario indicates multiple YUV streams with same size
1423                     * as input stream have been requested, since zsl stream handle
1424                     * is solely use for the purpose of overriding the size of streams
1425                     * which share h/w streams we will just make a guess here as to
1426                     * which of the stream is a ZSL stream, this will be refactored
1427                     * once we make generic logic for streams sharing encoder output
1428                     */
1429                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1430                }
1431                zslStream = newStream;
1432            }
1433        }
1434        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1435            jpegStream = newStream;
1436        }
1437    }
1438
1439    /* If a zsl stream is set, we know that we have configured at least one input or
1440       bidirectional stream */
1441    if (NULL != zslStream) {
1442        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1443        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1444        mInputStreamInfo.format = zslStream->format;
1445        mInputStreamInfo.usage = zslStream->usage;
1446        CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1447                __func__, mInputStreamInfo.dim.width,
1448                mInputStreamInfo.dim.height,
1449                mInputStreamInfo.format, mInputStreamInfo.usage);
1450    }
1451
1452    cleanAndSortStreamInfo();
1453    if (mMetadataChannel) {
1454        delete mMetadataChannel;
1455        mMetadataChannel = NULL;
1456    }
1457    if (mSupportChannel) {
1458        delete mSupportChannel;
1459        mSupportChannel = NULL;
1460    }
1461
1462    if (mAnalysisChannel) {
1463        delete mAnalysisChannel;
1464        mAnalysisChannel = NULL;
1465    }
1466
1467    if (mDummyBatchChannel) {
1468        delete mDummyBatchChannel;
1469        mDummyBatchChannel = NULL;
1470    }
1471
1472    //Create metadata channel and initialize it
1473    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1474                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1475                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1476    if (mMetadataChannel == NULL) {
1477        ALOGE("%s: failed to allocate metadata channel", __func__);
1478        rc = -ENOMEM;
1479        pthread_mutex_unlock(&mMutex);
1480        return rc;
1481    }
1482    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1483    if (rc < 0) {
1484        ALOGE("%s: metadata channel initialization failed", __func__);
1485        delete mMetadataChannel;
1486        mMetadataChannel = NULL;
1487        pthread_mutex_unlock(&mMutex);
1488        return rc;
1489    }
1490
1491    // Create analysis stream all the time, even when h/w support is not available
1492    {
1493        mAnalysisChannel = new QCamera3SupportChannel(
1494                mCameraHandle->camera_handle,
1495                mChannelHandle,
1496                mCameraHandle->ops,
1497                &gCamCapability[mCameraId]->padding_info,
1498                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1499                CAM_STREAM_TYPE_ANALYSIS,
1500                &gCamCapability[mCameraId]->analysis_recommended_res,
1501                gCamCapability[mCameraId]->analysis_recommended_format,
1502                this,
1503                0); // force buffer count to 0
1504        if (!mAnalysisChannel) {
1505            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1506            pthread_mutex_unlock(&mMutex);
1507            return -ENOMEM;
1508        }
1509    }
1510
1511    bool isRawStreamRequested = false;
1512    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1513    /* Allocate channel objects for the requested streams */
1514    for (size_t i = 0; i < streamList->num_streams; i++) {
1515        camera3_stream_t *newStream = streamList->streams[i];
1516        uint32_t stream_usage = newStream->usage;
1517        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1518        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1519        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1520                || IS_USAGE_ZSL(newStream->usage)) &&
1521            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1522            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1523            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1524        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1525                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1526        } else {
1527            //for non zsl streams find out the format
1528            switch (newStream->format) {
1529            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1530              {
1531                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1532                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1533
1534                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1535
1536                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1537                     if (m_bTnrEnabled && m_bTnrVideo) {
1538                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1539                             CAM_QCOM_FEATURE_CPP_TNR;
1540                     }
1541
1542                 } else {
1543
1544                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1545                     if (m_bTnrEnabled && m_bTnrPreview) {
1546                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1547                             CAM_QCOM_FEATURE_CPP_TNR;
1548                     }
1549                 }
1550
1551                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1552                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1553                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1554                             newStream->height;
1555                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1556                             newStream->width;
1557                 }
1558              }
1559              break;
1560           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1561              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1562              if (isOnEncoder(maxViewfinderSize, newStream->width,
1563                      newStream->height)) {
1564                  if (bUseCommonFeatureMask)
1565                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1566                              commonFeatureMask;
1567                  else
1568                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1569                              CAM_QCOM_FEATURE_NONE;
1570              } else {
1571                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1572                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1573              }
1574              break;
1575           case HAL_PIXEL_FORMAT_BLOB:
1576              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1577              if (m_bIs4KVideo && !isZsl) {
1578                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1579                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1580              } else {
1581                  if (bUseCommonFeatureMask &&
1582                          isOnEncoder(maxViewfinderSize, newStream->width,
1583                                  newStream->height)) {
1584                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1585                  } else {
1586                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1587                  }
1588              }
1589              if (isZsl) {
1590                  if (zslStream) {
1591                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1592                              (int32_t)zslStream->width;
1593                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1594                              (int32_t)zslStream->height;
1595                  } else {
1596                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1597                      pthread_mutex_unlock(&mMutex);
1598                      return -EINVAL;
1599                  }
1600              } else if (m_bIs4KVideo) {
1601                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1602                          (int32_t)videoWidth;
1603                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1604                          (int32_t)videoHeight;
1605              } else if (bYuv888OverrideJpeg) {
1606                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1607                          (int32_t)largeYuv888Size.width;
1608                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1609                          (int32_t)largeYuv888Size.height;
1610              }
1611              break;
1612           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1613           case HAL_PIXEL_FORMAT_RAW16:
1614           case HAL_PIXEL_FORMAT_RAW10:
1615              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1616              isRawStreamRequested = true;
1617              break;
1618           default:
1619              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1620              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1621              break;
1622            }
1623
1624        }
1625
1626        if (newStream->priv == NULL) {
1627            //New stream, construct channel
1628            switch (newStream->stream_type) {
1629            case CAMERA3_STREAM_INPUT:
1630                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1631                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1632                break;
1633            case CAMERA3_STREAM_BIDIRECTIONAL:
1634                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1635                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1636                break;
1637            case CAMERA3_STREAM_OUTPUT:
1638                /* For video encoding stream, set read/write rarely
1639                 * flag so that they may be set to un-cached */
1640                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1641                    newStream->usage |=
1642                         (GRALLOC_USAGE_SW_READ_RARELY |
1643                         GRALLOC_USAGE_SW_WRITE_RARELY |
1644                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1645                else if (IS_USAGE_ZSL(newStream->usage))
1646                    CDBG("%s: ZSL usage flag skipping", __func__);
1647                else if (newStream == zslStream
1648                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1649                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1650                } else
1651                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1652                break;
1653            default:
1654                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1655                break;
1656            }
1657
1658            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1659                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1660                QCamera3ProcessingChannel *channel = NULL;
1661                switch (newStream->format) {
1662                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1663                    if ((newStream->usage &
1664                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1665                            (streamList->operation_mode ==
1666                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1667                    ) {
1668                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1669                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1670                                &gCamCapability[mCameraId]->padding_info,
1671                                this,
1672                                newStream,
1673                                (cam_stream_type_t)
1674                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1675                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1676                                mMetadataChannel,
1677                                0); //heap buffers are not required for HFR video channel
1678                        if (channel == NULL) {
1679                            ALOGE("%s: allocation of channel failed", __func__);
1680                            pthread_mutex_unlock(&mMutex);
1681                            return -ENOMEM;
1682                        }
1683                        //channel->getNumBuffers() will return 0 here so use
1684                        //MAX_INFLIGH_HFR_REQUESTS
1685                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1686                        newStream->priv = channel;
1687                        ALOGI("%s: num video buffers in HFR mode: %d",
1688                                __func__, MAX_INFLIGHT_HFR_REQUESTS);
1689                    } else {
1690                        /* Copy stream contents in HFR preview only case to create
1691                         * dummy batch channel so that sensor streaming is in
1692                         * HFR mode */
1693                        if (!m_bIsVideo && (streamList->operation_mode ==
1694                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1695                            mDummyBatchStream = *newStream;
1696                        }
1697                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1698                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1699                                &gCamCapability[mCameraId]->padding_info,
1700                                this,
1701                                newStream,
1702                                (cam_stream_type_t)
1703                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1704                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1705                                mMetadataChannel,
1706                                MAX_INFLIGHT_REQUESTS);
1707                        if (channel == NULL) {
1708                            ALOGE("%s: allocation of channel failed", __func__);
1709                            pthread_mutex_unlock(&mMutex);
1710                            return -ENOMEM;
1711                        }
1712                        newStream->max_buffers = channel->getNumBuffers();
1713                        newStream->priv = channel;
1714                    }
1715                    break;
1716                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1717                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1718                            mChannelHandle,
1719                            mCameraHandle->ops, captureResultCb,
1720                            &gCamCapability[mCameraId]->padding_info,
1721                            this,
1722                            newStream,
1723                            (cam_stream_type_t)
1724                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1725                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1726                            mMetadataChannel);
1727                    if (channel == NULL) {
1728                        ALOGE("%s: allocation of YUV channel failed", __func__);
1729                        pthread_mutex_unlock(&mMutex);
1730                        return -ENOMEM;
1731                    }
1732                    newStream->max_buffers = channel->getNumBuffers();
1733                    newStream->priv = channel;
1734                    break;
1735                }
1736                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1737                case HAL_PIXEL_FORMAT_RAW16:
1738                case HAL_PIXEL_FORMAT_RAW10:
1739                    mRawChannel = new QCamera3RawChannel(
1740                            mCameraHandle->camera_handle, mChannelHandle,
1741                            mCameraHandle->ops, captureResultCb,
1742                            &gCamCapability[mCameraId]->padding_info,
1743                            this, newStream, CAM_QCOM_FEATURE_NONE,
1744                            mMetadataChannel,
1745                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1746                    if (mRawChannel == NULL) {
1747                        ALOGE("%s: allocation of raw channel failed", __func__);
1748                        pthread_mutex_unlock(&mMutex);
1749                        return -ENOMEM;
1750                    }
1751                    newStream->max_buffers = mRawChannel->getNumBuffers();
1752                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1753                    break;
1754                case HAL_PIXEL_FORMAT_BLOB:
1755                    // Max live snapshot inflight buffer is 1. This is to mitigate
1756                    // frame drop issues for video snapshot. The more buffers being
1757                    // allocated, the more frame drops there are.
1758                    mPictureChannel = new QCamera3PicChannel(
1759                            mCameraHandle->camera_handle, mChannelHandle,
1760                            mCameraHandle->ops, captureResultCb,
1761                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1762                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1763                            m_bIs4KVideo, isZsl, mMetadataChannel,
1764                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1765                    if (mPictureChannel == NULL) {
1766                        ALOGE("%s: allocation of channel failed", __func__);
1767                        pthread_mutex_unlock(&mMutex);
1768                        return -ENOMEM;
1769                    }
1770                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1771                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1772                    mPictureChannel->overrideYuvSize(
1773                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1774                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1775                    break;
1776
1777                default:
1778                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1779                    break;
1780                }
1781            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1782                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1783            } else {
1784                ALOGE("%s: Error, Unknown stream type", __func__);
1785                return -EINVAL;
1786            }
1787
1788            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1789                    it != mStreamInfo.end(); it++) {
1790                if ((*it)->stream == newStream) {
1791                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1792                    break;
1793                }
1794            }
1795        } else {
1796            // Channel already exists for this stream
1797            // Do nothing for now
1798        }
1799
1800    /* Do not add entries for input stream in metastream info
1801         * since there is no real stream associated with it
1802         */
1803        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1804            mStreamConfigInfo.num_streams++;
1805    }
1806
1807    //RAW DUMP channel
1808    if (mEnableRawDump && isRawStreamRequested == false){
1809        cam_dimension_t rawDumpSize;
1810        rawDumpSize = getMaxRawSize(mCameraId);
1811        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1812                                  mChannelHandle,
1813                                  mCameraHandle->ops,
1814                                  rawDumpSize,
1815                                  &gCamCapability[mCameraId]->padding_info,
1816                                  this, CAM_QCOM_FEATURE_NONE);
1817        if (!mRawDumpChannel) {
1818            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1819            pthread_mutex_unlock(&mMutex);
1820            return -ENOMEM;
1821        }
1822    }
1823
1824
1825    if (mAnalysisChannel) {
1826        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1827                gCamCapability[mCameraId]->analysis_recommended_res;
1828        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1829                CAM_STREAM_TYPE_ANALYSIS;
1830        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1831                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1832        mStreamConfigInfo.num_streams++;
1833    }
1834
1835    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1836        mSupportChannel = new QCamera3SupportChannel(
1837                mCameraHandle->camera_handle,
1838                mChannelHandle,
1839                mCameraHandle->ops,
1840                &gCamCapability[mCameraId]->padding_info,
1841                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1842                CAM_STREAM_TYPE_CALLBACK,
1843                &QCamera3SupportChannel::kDim,
1844                CAM_FORMAT_YUV_420_NV21,
1845                this);
1846        if (!mSupportChannel) {
1847            ALOGE("%s: dummy channel cannot be created", __func__);
1848            pthread_mutex_unlock(&mMutex);
1849            return -ENOMEM;
1850        }
1851    }
1852
1853    if (mSupportChannel) {
1854        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1855                QCamera3SupportChannel::kDim;
1856        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1857                CAM_STREAM_TYPE_CALLBACK;
1858        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1859                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1860        mStreamConfigInfo.num_streams++;
1861    }
1862
1863    if (mRawDumpChannel) {
1864        cam_dimension_t rawSize;
1865        rawSize = getMaxRawSize(mCameraId);
1866        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1867                rawSize;
1868        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1869                CAM_STREAM_TYPE_RAW;
1870        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1871                CAM_QCOM_FEATURE_NONE;
1872        mStreamConfigInfo.num_streams++;
1873    }
1874    /* In HFR mode, if video stream is not added, create a dummy channel so that
1875     * ISP can create a batch mode even for preview only case. This channel is
1876     * never 'start'ed (no stream-on), it is only 'initialized'  */
1877    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1878            !m_bIsVideo) {
1879        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1880                mChannelHandle,
1881                mCameraHandle->ops, captureResultCb,
1882                &gCamCapability[mCameraId]->padding_info,
1883                this,
1884                &mDummyBatchStream,
1885                CAM_STREAM_TYPE_VIDEO,
1886                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1887                mMetadataChannel);
1888        if (NULL == mDummyBatchChannel) {
1889            ALOGE("%s: creation of mDummyBatchChannel failed."
1890                    "Preview will use non-hfr sensor mode ", __func__);
1891        }
1892    }
1893    if (mDummyBatchChannel) {
1894        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1895                mDummyBatchStream.width;
1896        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1897                mDummyBatchStream.height;
1898        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1899                CAM_STREAM_TYPE_VIDEO;
1900        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1901                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1902        mStreamConfigInfo.num_streams++;
1903    }
1904
1905    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1906    mStreamConfigInfo.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
1907
1908    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1909    for (pendingRequestIterator i = mPendingRequestsList.begin();
1910            i != mPendingRequestsList.end();) {
1911        i = erasePendingRequest(i);
1912    }
1913    mPendingFrameDropList.clear();
1914    // Initialize/Reset the pending buffers list
1915    mPendingBuffersMap.num_buffers = 0;
1916    mPendingBuffersMap.mPendingBufferList.clear();
1917    mPendingReprocessResultList.clear();
1918
1919    mFirstRequest = true;
1920    mCurJpegMeta.clear();
1921    //Get min frame duration for this streams configuration
1922    deriveMinFrameDuration();
1923
1924    /* Turn on video hint only if video stream is configured */
1925
1926    pthread_mutex_unlock(&mMutex);
1927
1928    return rc;
1929}
1930
1931/*===========================================================================
1932 * FUNCTION   : validateCaptureRequest
1933 *
1934 * DESCRIPTION: validate a capture request from camera service
1935 *
1936 * PARAMETERS :
1937 *   @request : request from framework to process
1938 *
1939 * RETURN     :
1940 *
1941 *==========================================================================*/
1942int QCamera3HardwareInterface::validateCaptureRequest(
1943                    camera3_capture_request_t *request)
1944{
1945    ssize_t idx = 0;
1946    const camera3_stream_buffer_t *b;
1947    CameraMetadata meta;
1948
1949    /* Sanity check the request */
1950    if (request == NULL) {
1951        ALOGE("%s: NULL capture request", __func__);
1952        return BAD_VALUE;
1953    }
1954
1955    if (request->settings == NULL && mFirstRequest) {
1956        /*settings cannot be null for the first request*/
1957        return BAD_VALUE;
1958    }
1959
1960    uint32_t frameNumber = request->frame_number;
1961    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1962        ALOGE("%s: Request %d: No output buffers provided!",
1963                __FUNCTION__, frameNumber);
1964        return BAD_VALUE;
1965    }
1966    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
1967        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
1968                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
1969        return BAD_VALUE;
1970    }
1971    if (request->input_buffer != NULL) {
1972        b = request->input_buffer;
1973        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1974            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1975                    __func__, frameNumber, (long)idx);
1976            return BAD_VALUE;
1977        }
1978        if (b->release_fence != -1) {
1979            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1980                    __func__, frameNumber, (long)idx);
1981            return BAD_VALUE;
1982        }
1983        if (b->buffer == NULL) {
1984            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1985                    __func__, frameNumber, (long)idx);
1986            return BAD_VALUE;
1987        }
1988    }
1989
1990    // Validate all buffers
1991    b = request->output_buffers;
1992    do {
1993        QCamera3ProcessingChannel *channel =
1994                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
1995        if (channel == NULL) {
1996            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1997                    __func__, frameNumber, (long)idx);
1998            return BAD_VALUE;
1999        }
2000        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2001            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2002                    __func__, frameNumber, (long)idx);
2003            return BAD_VALUE;
2004        }
2005        if (b->release_fence != -1) {
2006            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2007                    __func__, frameNumber, (long)idx);
2008            return BAD_VALUE;
2009        }
2010        if (b->buffer == NULL) {
2011            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2012                    __func__, frameNumber, (long)idx);
2013            return BAD_VALUE;
2014        }
2015        if (*(b->buffer) == NULL) {
2016            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2017                    __func__, frameNumber, (long)idx);
2018            return BAD_VALUE;
2019        }
2020        idx++;
2021        b = request->output_buffers + idx;
2022    } while (idx < (ssize_t)request->num_output_buffers);
2023
2024    return NO_ERROR;
2025}
2026
2027/*===========================================================================
2028 * FUNCTION   : deriveMinFrameDuration
2029 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2031 *              on currently configured streams.
2032 *
2033 * PARAMETERS : NONE
2034 *
2035 * RETURN     : NONE
2036 *
2037 *==========================================================================*/
2038void QCamera3HardwareInterface::deriveMinFrameDuration()
2039{
2040    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2041
2042    maxJpegDim = 0;
2043    maxProcessedDim = 0;
2044    maxRawDim = 0;
2045
2046    // Figure out maximum jpeg, processed, and raw dimensions
2047    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2048        it != mStreamInfo.end(); it++) {
2049
2050        // Input stream doesn't have valid stream_type
2051        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2052            continue;
2053
2054        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2055        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2056            if (dimension > maxJpegDim)
2057                maxJpegDim = dimension;
2058        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2059                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2060                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2061            if (dimension > maxRawDim)
2062                maxRawDim = dimension;
2063        } else {
2064            if (dimension > maxProcessedDim)
2065                maxProcessedDim = dimension;
2066        }
2067    }
2068
2069    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2070            MAX_SIZES_CNT);
2071
2072    //Assume all jpeg dimensions are in processed dimensions.
2073    if (maxJpegDim > maxProcessedDim)
2074        maxProcessedDim = maxJpegDim;
2075    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2076    if (maxProcessedDim > maxRawDim) {
2077        maxRawDim = INT32_MAX;
2078
2079        for (size_t i = 0; i < count; i++) {
2080            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2081                    gCamCapability[mCameraId]->raw_dim[i].height;
2082            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2083                maxRawDim = dimension;
2084        }
2085    }
2086
2087    //Find minimum durations for processed, jpeg, and raw
2088    for (size_t i = 0; i < count; i++) {
2089        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2090                gCamCapability[mCameraId]->raw_dim[i].height) {
2091            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2092            break;
2093        }
2094    }
2095    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2096    for (size_t i = 0; i < count; i++) {
2097        if (maxProcessedDim ==
2098                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2099                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2100            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2101            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2102            break;
2103        }
2104    }
2105}
2106
2107/*===========================================================================
2108 * FUNCTION   : getMinFrameDuration
2109 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2116 *
2117 *==========================================================================*/
2118int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2119{
2120    bool hasJpegStream = false;
2121    bool hasRawStream = false;
2122    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2123        const camera3_stream_t *stream = request->output_buffers[i].stream;
2124        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2125            hasJpegStream = true;
2126        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2127                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2128                stream->format == HAL_PIXEL_FORMAT_RAW16)
2129            hasRawStream = true;
2130    }
2131
2132    if (!hasJpegStream)
2133        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2134    else
2135        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2136}
2137
2138/*===========================================================================
2139 * FUNCTION   : handlePendingReprocResults
2140 *
2141 * DESCRIPTION: check and notify on any pending reprocess results
2142 *
2143 * PARAMETERS :
2144 *   @frame_number   : Pending request frame number
2145 *
2146 * RETURN     : int32_t type of status
2147 *              NO_ERROR  -- success
2148 *              none-zero failure code
2149 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Search the pending reprocess result list for an entry matching this
    // frame number; at most one entry is handled per call.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notification message that was stored with this
            // deferred reprocess result.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Assemble a single-buffer capture result from the stored
                    // reprocess buffer (j) and the pending request data (k),
                    // then deliver it to the framework.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully served; remove it from the pending list.
                    // Breaking immediately makes the erase safe.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Drop the delivered reprocess result. Iteration stops right
            // after, so erasing through iterator j is safe.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2188
2189/*===========================================================================
2190 * FUNCTION   : handleBatchMetadata
2191 *
2192 * DESCRIPTION: Handles metadata buffer callback in batch mode
2193 *
2194 * PARAMETERS : @metadata_buf: metadata buffer
2195 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2196 *                 the meta buf in this method
2197 *
2198 * RETURN     :
2199 *
2200 *==========================================================================*/
2201void QCamera3HardwareInterface::handleBatchMetadata(
2202        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2203{
2204    ATRACE_CALL();
2205
2206    if (NULL == metadata_buf) {
2207        ALOGE("%s: metadata_buf is NULL", __func__);
2208        return;
2209    }
2210    /* In batch mode, the metdata will contain the frame number and timestamp of
2211     * the last frame in the batch. Eg: a batch containing buffers from request
2212     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2213     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2214     * multiple process_capture_results */
2215    metadata_buffer_t *metadata =
2216            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2217    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2218    uint32_t last_frame_number, last_urgent_frame_number;
2219    uint32_t frame_number, urgent_frame_number = 0;
2220    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2221    bool invalid_metadata = false;
2222    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2223    size_t loopCount = 1;
2224
2225    int32_t *p_frame_number_valid =
2226            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2227    uint32_t *p_frame_number =
2228            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2229    int64_t *p_capture_time =
2230            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2231    int32_t *p_urgent_frame_number_valid =
2232            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2233    uint32_t *p_urgent_frame_number =
2234            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2235
2236    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2237            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2238            (NULL == p_urgent_frame_number)) {
2239        ALOGE("%s: Invalid metadata", __func__);
2240        invalid_metadata = true;
2241    } else {
2242        frame_number_valid = *p_frame_number_valid;
2243        last_frame_number = *p_frame_number;
2244        last_frame_capture_time = *p_capture_time;
2245        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2246        last_urgent_frame_number = *p_urgent_frame_number;
2247    }
2248
2249    // If reported capture_time is 0, skip handling this metadata
2250    if (!last_frame_capture_time) {
2251        goto done_batch_metadata;
2252    }
2253    /* In batchmode, when no video buffers are requested, set_parms are sent
2254     * for every capture_request. The difference between consecutive urgent
2255     * frame numbers and frame numbers should be used to interpolate the
2256     * corresponding frame numbers and time stamps */
2257    if (urgent_frame_number_valid) {
2258        /* Frame numbers start with 0, handle it in the else condition */
2259        if (last_urgent_frame_number &&
2260                (last_urgent_frame_number >= mPrevUrgentFrameNumber)) {
2261            urgentFrameNumDiff = last_urgent_frame_number - mPrevUrgentFrameNumber;
2262        } else {
2263            urgentFrameNumDiff = 1;
2264        }
2265        mPrevUrgentFrameNumber = last_urgent_frame_number;
2266    }
2267    if (frame_number_valid) {
2268        /* Frame numbers start with 0, handle it in the else condition */
2269        if(last_frame_number && (last_frame_number >= mPrevFrameNumber)) {
2270            frameNumDiff = last_frame_number - mPrevFrameNumber;
2271        } else {
2272            frameNumDiff = 1;
2273        }
2274        mPrevFrameNumber = last_frame_number;
2275    }
2276    if (urgent_frame_number_valid || frame_number_valid) {
2277        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2278        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2279            ALOGE("%s: urgentFrameNumDiff: %d", __func__, urgentFrameNumDiff);
2280        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2281            ALOGE("%s: frameNumDiff: %d", __func__, frameNumDiff);
2282
2283    }
2284
2285    CDBG("%s: urgent_frm: valid: %d frm_num: %d previous frm_num: %d",
2286            __func__, urgent_frame_number_valid, last_urgent_frame_number,
2287            mPrevUrgentFrameNumber);
2288    CDBG("%s:        frm: valid: %d frm_num: %d previous frm_num:: %d",
2289            __func__, frame_number_valid, last_frame_number, mPrevFrameNumber);
2290
2291    //TODO: Need to ensure, metadata is not posted with the same frame numbers
2292    //when urgentFrameNumDiff != frameNumDiff
2293    for (size_t i = 0; i < loopCount; i++) {
2294        /* handleMetadataWithLock is called even for invalid_metadata for
2295         * pipeline depth calculation */
2296        if (!invalid_metadata) {
2297            /* Infer frame number. Batch metadata contains frame number of the
2298             * last frame */
2299            if (urgent_frame_number_valid) {
2300                if (i < urgentFrameNumDiff) {
2301                    urgent_frame_number =
2302                            last_urgent_frame_number + 1 - urgentFrameNumDiff + i;
2303                    CDBG("%s: inferred urgent frame_number: %d",
2304                            __func__, urgent_frame_number);
2305                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2306                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2307                } else {
2308                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2309                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2310                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2311                }
2312            }
2313
2314            /* Infer frame number. Batch metadata contains frame number of the
2315             * last frame */
2316            if (frame_number_valid) {
2317                if (i < frameNumDiff) {
2318                    frame_number = last_frame_number + 1 - frameNumDiff + i;
2319                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2320                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2321                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2322                } else {
2323                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2324                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2325                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2326                }
2327            }
2328
2329            //Infer timestamp
2330            first_frame_capture_time = last_frame_capture_time -
2331                    (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2332            capture_time =
2333                    first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2334            ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2335                    CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2336            CDBG("%s: batch capture_time: %lld, capture_time: %lld",
2337                    __func__, last_frame_capture_time, capture_time);
2338        }
2339        pthread_mutex_lock(&mMutex);
2340        handleMetadataWithLock(metadata_buf,
2341                false /* free_and_bufdone_meta_buf */);
2342        pthread_mutex_unlock(&mMutex);
2343    }
2344
2345done_batch_metadata:
2346    /* BufDone metadata buffer */
2347    if (free_and_bufdone_meta_buf) {
2348        mMetadataChannel->bufDone(metadata_buf);
2349        free(metadata_buf);
2350    }
2351}
2352
2353/*===========================================================================
2354 * FUNCTION   : handleMetadataWithLock
2355 *
2356 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2357 *
2358 * PARAMETERS : @metadata_buf: metadata buffer
2359 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2360 *                 the meta buf in this method
2361 *
2362 * RETURN     :
2363 *
2364 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    // Vendor metadata payload carried in the first buffer of the superbuf
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop, when available, is also consulted below to report
    // per-stream ERROR_BUFFER notifications for dropped buffers
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    // Bail out (releasing the metadata buffer if we own it) when any mandatory
    // field is missing from the metadata
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // An older live request that never got its urgent (partial)
            // metadata means the HAL missed a callback
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // Metadata without a valid frame number only marks start-of-frame;
    // nothing further to report to the framework
    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
                   QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                   for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                       if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                           ALOGW("%s: Start of reporting error frame#=%u, streamID=%u",
                                   __func__, i->frame_number, streamID);
                           notify_msg.type = CAMERA3_MSG_ERROR;
                           notify_msg.message.error.frame_number = i->frame_number;
                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                           notify_msg.message.error.error_stream = j->stream;
                           mCallbackOps->notify(mCallbackOps, &notify_msg);
                           ALOGW("%s: End of reporting error frame#=%u, streamID=%u",
                                  __func__, i->frame_number, streamID);
                           PendingFrameDropInfo PendingFrameDrop;
                           PendingFrameDrop.frame_number=i->frame_number;
                           PendingFrameDrop.stream_ID = streamID;
                           // Add the Frame drop info to mPendingFrameDropList
                           mPendingFrameDropList.push_back(PendingFrameDrop);
                      }
                   }
               } else {
                   ALOGE("%s: JPEG buffer dropped for frame number %d",
                           __func__, i->frame_number);
               }
            }
        }

        //TODO: batch handling for dropped metadata

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            // Estimate the shutter time by stepping back ~33ms per frame of
            // distance from the current urgent frame number
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time -
                        (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
            if (i->input_buffer) {
                i->partial_result_cnt++; //input request will not have urgent metadata
                CameraMetadata settings;
                if(i->settings) {
                    settings = i->settings;
                    if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                        nsecs_t input_capture_time =
                                settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                        notify_msg.message.shutter.timestamp = (uint64_t)input_capture_time;
                    } else {
                        ALOGE("%s: No timestamp in input settings! Using current one.",
                                __func__);
                    }
                } else {
                    ALOGE("%s: Input settings missing!", __func__);
                }
                result.result = settings.release();
                result.partial_result = i->partial_result_cnt;
                CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            } else {
                mPendingLiveRequest--;
                // No metadata arrived for this live request: complete it with a
                // minimal dummy result so the framework is not blocked
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &i->timestamp, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            }
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->timestamp = (nsecs_t)notify_msg.message.shutter.timestamp;
            CDBG("%s: Support notification !!!! notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            // When no channel consumed the metadata for reprocessing, we still
            // own it; release it here if the caller asked us to
            if (!internalPproc) {
                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers already filled for this request
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): plain operator new throws on failure, so this NULL
            // check can never fire; new (std::nothrow) would be required for
            // it to be meaningful
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer STATUS_ERROR if its frame/stream pair was
                    // recorded as dropped earlier
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the pending-buffers bookkeeping
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        CDBG("%s: Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                    __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                        __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = erasePendingRequest(i);

        // A reprocess result for the next frame number may already be waiting
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every request still pending has advanced one more stage in the pipeline
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
    unblockRequestIfNecessary();

}
2702
2703/*===========================================================================
2704 * FUNCTION   : hdrPlusPerfLock
2705 *
2706 * DESCRIPTION: perf lock for HDR+ using custom intent
2707 *
2708 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2709 *
2710 * RETURN     : None
2711 *
2712 *==========================================================================*/
2713void QCamera3HardwareInterface::hdrPlusPerfLock(
2714        mm_camera_super_buf_t *metadata_buf)
2715{
2716    if (NULL == metadata_buf) {
2717        ALOGE("%s: metadata_buf is NULL", __func__);
2718        return;
2719    }
2720    metadata_buffer_t *metadata =
2721            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2722    int32_t *p_frame_number_valid =
2723            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2724    uint32_t *p_frame_number =
2725            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2726
2727    //acquire perf lock for 5 sec after the last HDR frame is captured
2728    if (*p_frame_number_valid) {
2729        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2730            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2731        }
2732    }
2733
2734    //release lock after perf lock timer is expired. If lock is already released,
2735    //isTimerReset returns false
2736    if (m_perfLock.isTimerReset()) {
2737        mLastCustIntentFrmNum = -1;
2738        m_perfLock.lock_rel_timed();
2739    }
2740}
2741/*===========================================================================
2742 * FUNCTION   : handleBufferWithLock
2743 *
2744 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2745 *
2746 * PARAMETERS : @buffer: image buffer for the callback
2747 *              @frame_number: frame number of the image buffer
2748 *
2749 * RETURN     :
2750 *
2751 *==========================================================================*/
2752void QCamera3HardwareInterface::handleBufferWithLock(
2753    camera3_stream_buffer_t *buffer, uint32_t frame_number)
2754{
2755    ATRACE_CALL();
2756    // If the frame number doesn't exist in the pending request list,
2757    // directly send the buffer to the frameworks, and update pending buffers map
2758    // Otherwise, book-keep the buffer.
2759    pendingRequestIterator i = mPendingRequestsList.begin();
2760    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2761        i++;
2762    }
2763    if (i == mPendingRequestsList.end()) {
2764        // Verify all pending requests frame_numbers are greater
2765        for (pendingRequestIterator j = mPendingRequestsList.begin();
2766                j != mPendingRequestsList.end(); j++) {
2767            if (j->frame_number < frame_number) {
2768                ALOGE("%s: Error: pending frame number %d is smaller than %d",
2769                        __func__, j->frame_number, frame_number);
2770            }
2771        }
2772        camera3_capture_result_t result;
2773        memset(&result, 0, sizeof(camera3_capture_result_t));
2774        result.result = NULL;
2775        result.frame_number = frame_number;
2776        result.num_output_buffers = 1;
2777        result.partial_result = 0;
2778        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2779                m != mPendingFrameDropList.end(); m++) {
2780            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
2781            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2782            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
2783                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2784                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
2785                        __func__, frame_number, streamID);
2786                m = mPendingFrameDropList.erase(m);
2787                break;
2788            }
2789        }
2790        result.output_buffers = buffer;
2791        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
2792                __func__, frame_number, buffer->buffer);
2793
2794        for (List<PendingBufferInfo>::iterator k =
2795                mPendingBuffersMap.mPendingBufferList.begin();
2796                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2797            if (k->buffer == buffer->buffer) {
2798                CDBG("%s: Found Frame buffer, take it out from list",
2799                        __func__);
2800
2801                mPendingBuffersMap.num_buffers--;
2802                k = mPendingBuffersMap.mPendingBufferList.erase(k);
2803                break;
2804            }
2805        }
2806        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2807            __func__, mPendingBuffersMap.num_buffers);
2808
2809        mCallbackOps->process_capture_result(mCallbackOps, &result);
2810    } else {
2811        if (i->input_buffer) {
2812            CameraMetadata settings;
2813            camera3_notify_msg_t notify_msg;
2814            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2815            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2816            if(i->settings) {
2817                settings = i->settings;
2818                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2819                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2820                } else {
2821                    ALOGE("%s: No timestamp in input settings! Using current one.",
2822                            __func__);
2823                }
2824            } else {
2825                ALOGE("%s: Input settings missing!", __func__);
2826            }
2827
2828            notify_msg.type = CAMERA3_MSG_SHUTTER;
2829            notify_msg.message.shutter.frame_number = frame_number;
2830            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2831
2832            if (i->input_buffer->release_fence != -1) {
2833               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
2834               close(i->input_buffer->release_fence);
2835               if (rc != OK) {
2836               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2837               }
2838            }
2839
2840            for (List<PendingBufferInfo>::iterator k =
2841                    mPendingBuffersMap.mPendingBufferList.begin();
2842                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2843                if (k->buffer == buffer->buffer) {
2844                    CDBG("%s: Found Frame buffer, take it out from list",
2845                            __func__);
2846
2847                    mPendingBuffersMap.num_buffers--;
2848                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
2849                    break;
2850                }
2851            }
2852            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2853                __func__, mPendingBuffersMap.num_buffers);
2854
2855            bool notifyNow = true;
2856            for (pendingRequestIterator j = mPendingRequestsList.begin();
2857                    j != mPendingRequestsList.end(); j++) {
2858                if (j->frame_number < frame_number) {
2859                    notifyNow = false;
2860                    break;
2861                }
2862            }
2863
2864            if (notifyNow) {
2865                camera3_capture_result result;
2866                memset(&result, 0, sizeof(camera3_capture_result));
2867                result.frame_number = frame_number;
2868                result.result = i->settings;
2869                result.input_buffer = i->input_buffer;
2870                result.num_output_buffers = 1;
2871                result.output_buffers = buffer;
2872                result.partial_result = PARTIAL_RESULT_COUNT;
2873
2874                mCallbackOps->notify(mCallbackOps, &notify_msg);
2875                mCallbackOps->process_capture_result(mCallbackOps, &result);
2876                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
2877                i = erasePendingRequest(i);
2878            } else {
2879                // Cache reprocess result for later
2880                PendingReprocessResult pendingResult;
2881                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
2882                pendingResult.notify_msg = notify_msg;
2883                pendingResult.buffer = *buffer;
2884                pendingResult.frame_number = frame_number;
2885                mPendingReprocessResultList.push_back(pendingResult);
2886                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
2887            }
2888        } else {
2889            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2890                j != i->buffers.end(); j++) {
2891                if (j->stream == buffer->stream) {
2892                    if (j->buffer != NULL) {
2893                        ALOGE("%s: Error: buffer is already set", __func__);
2894                    } else {
2895                        j->buffer = (camera3_stream_buffer_t *)malloc(
2896                            sizeof(camera3_stream_buffer_t));
2897                        *(j->buffer) = *buffer;
2898                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
2899                            __func__, buffer, frame_number);
2900                    }
2901                }
2902            }
2903        }
2904    }
2905}
2906
2907/*===========================================================================
2908 * FUNCTION   : unblockRequestIfNecessary
2909 *
2910 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2911 *              that mMutex is held when this function is called.
2912 *
2913 * PARAMETERS :
2914 *
2915 * RETURN     :
2916 *
2917 *==========================================================================*/
2918void QCamera3HardwareInterface::unblockRequestIfNecessary()
2919{
2920   // Unblock process_capture_request
2921   pthread_cond_signal(&mRequestCond);
2922}
2923
2924
2925/*===========================================================================
2926 * FUNCTION   : processCaptureRequest
2927 *
2928 * DESCRIPTION: process a capture request from camera service
2929 *
2930 * PARAMETERS :
2931 *   @request : request from framework to process
2932 *
2933 * RETURN     :
2934 *
2935 *==========================================================================*/
2936int QCamera3HardwareInterface::processCaptureRequest(
2937                    camera3_capture_request_t *request)
2938{
2939    ATRACE_CALL();
2940    int rc = NO_ERROR;
2941    int32_t request_id;
2942    CameraMetadata meta;
2943    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
2944    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
2945    bool isVidBufRequested = false;
2946    camera3_stream_buffer_t *pInputBuffer = NULL;
2947
2948    pthread_mutex_lock(&mMutex);
2949
2950    rc = validateCaptureRequest(request);
2951    if (rc != NO_ERROR) {
2952        ALOGE("%s: incoming request is not valid", __func__);
2953        pthread_mutex_unlock(&mMutex);
2954        return rc;
2955    }
2956
2957    meta = request->settings;
2958
2959    // For first capture request, send capture intent, and
2960    // stream on all streams
2961    if (mFirstRequest) {
2962        // send an unconfigure to the backend so that the isp
2963        // resources are deallocated
2964        if (!mFirstConfiguration) {
2965            cam_stream_size_info_t stream_config_info;
2966            int32_t hal_version = CAM_HAL_V3;
2967            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
2968            stream_config_info.buffer_info.min_buffers =
2969                    MIN_INFLIGHT_REQUESTS;
2970            stream_config_info.buffer_info.max_buffers =
2971                    MAX_INFLIGHT_REQUESTS;
2972            clear_metadata_buffer(mParameters);
2973            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2974                    CAM_INTF_PARM_HAL_VERSION, hal_version);
2975            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2976                    CAM_INTF_META_STREAM_INFO, stream_config_info);
2977            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2978                    mParameters);
2979            if (rc < 0) {
2980                ALOGE("%s: set_parms for unconfigure failed", __func__);
2981                pthread_mutex_unlock(&mMutex);
2982                return rc;
2983            }
2984        }
2985        m_perfLock.lock_acq();
2986        /* get eis information for stream configuration */
2987        cam_is_type_t is_type;
2988        char is_type_value[PROPERTY_VALUE_MAX];
2989        property_get("persist.camera.is_type", is_type_value, "0");
2990        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2991
2992        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2993            int32_t hal_version = CAM_HAL_V3;
2994            uint8_t captureIntent =
2995                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2996            mCaptureIntent = captureIntent;
2997            clear_metadata_buffer(mParameters);
2998            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
2999            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3000        }
3001
3002        //If EIS is enabled, turn it on for video
3003        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3004        int32_t vsMode;
3005        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3006        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3007            rc = BAD_VALUE;
3008        }
3009
3010        //IS type will be 0 unless EIS is supported. If EIS is supported
3011        //it could either be 1 or 4 depending on the stream and video size
3012        if (setEis) {
3013            if (!m_bEisSupportedSize) {
3014                is_type = IS_TYPE_DIS;
3015            } else {
3016                is_type = IS_TYPE_EIS_2_0;
3017            }
3018            mStreamConfigInfo.is_type = is_type;
3019        } else {
3020            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3021        }
3022
3023        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3024                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3025        int32_t tintless_value = 1;
3026        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3027                CAM_INTF_PARM_TINTLESS, tintless_value);
3028        //Disable CDS for HFR mode and if mPprocBypass = true.
3029        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3030        //after every configure_stream
3031        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3032                (m_bIsVideo)) {
3033            int32_t cds = CAM_CDS_MODE_OFF;
3034            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3035                    CAM_INTF_PARM_CDS_MODE, cds))
3036                ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3037
3038        }
3039        setMobicat();
3040
3041        /* Set fps and hfr mode while sending meta stream info so that sensor
3042         * can configure appropriate streaming mode */
3043        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3044        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3045            rc = setHalFpsRange(meta, mParameters);
3046            if (rc != NO_ERROR) {
3047                ALOGE("%s: setHalFpsRange failed", __func__);
3048            }
3049        }
3050        if (meta.exists(ANDROID_CONTROL_MODE)) {
3051            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3052            rc = extractSceneMode(meta, metaMode, mParameters);
3053            if (rc != NO_ERROR) {
3054                ALOGE("%s: extractSceneMode failed", __func__);
3055            }
3056        }
3057
3058        //TODO: validate the arguments, HSV scenemode should have only the
3059        //advertised fps ranges
3060
        /*set the capture intent, hal version, tintless, stream info,
         *and DIS-enable parameters to the backend*/
3063        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3064        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3065                    mParameters);
3066
3067        cam_dimension_t sensor_dim;
3068        memset(&sensor_dim, 0, sizeof(sensor_dim));
3069        rc = getSensorOutputSize(sensor_dim);
3070        if (rc != NO_ERROR) {
3071            ALOGE("%s: Failed to get sensor output size", __func__);
3072            pthread_mutex_unlock(&mMutex);
3073            goto error_exit;
3074        }
3075
3076        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3077                gCamCapability[mCameraId]->active_array_size.height,
3078                sensor_dim.width, sensor_dim.height);
3079
3080        /* Set batchmode before initializing channel. Since registerBuffer
3081         * internally initializes some of the channels, better set batchmode
3082         * even before first register buffer */
3083        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3084            it != mStreamInfo.end(); it++) {
3085            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3086            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3087                    && mBatchSize) {
3088                rc = channel->setBatchSize(mBatchSize);
3089                //Disable per frame map unmap for HFR/batchmode case
3090                rc |= channel->setPerFrameMapUnmap(false);
3091                if (NO_ERROR != rc) {
3092                    ALOGE("%s : Channel init failed %d", __func__, rc);
3093                    pthread_mutex_unlock(&mMutex);
3094                    goto error_exit;
3095                }
3096            }
3097        }
3098
3099        //First initialize all streams
3100        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3101            it != mStreamInfo.end(); it++) {
3102            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3103            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3104               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3105               setEis)
3106                rc = channel->initialize(is_type);
3107            else {
3108                rc = channel->initialize(IS_TYPE_NONE);
3109            }
3110            if (NO_ERROR != rc) {
3111                ALOGE("%s : Channel initialization failed %d", __func__, rc);
3112                pthread_mutex_unlock(&mMutex);
3113                goto error_exit;
3114            }
3115        }
3116
3117        if (mRawDumpChannel) {
3118            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3119            if (rc != NO_ERROR) {
3120                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3121                pthread_mutex_unlock(&mMutex);
3122                goto error_exit;
3123            }
3124        }
3125        if (mSupportChannel) {
3126            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3127            if (rc < 0) {
3128                ALOGE("%s: Support channel initialization failed", __func__);
3129                pthread_mutex_unlock(&mMutex);
3130                goto error_exit;
3131            }
3132        }
3133        if (mAnalysisChannel) {
3134            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3135            if (rc < 0) {
3136                ALOGE("%s: Analysis channel initialization failed", __func__);
3137                pthread_mutex_unlock(&mMutex);
3138                goto error_exit;
3139            }
3140        }
3141        if (mDummyBatchChannel) {
3142            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3143            if (rc < 0) {
3144                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3145                pthread_mutex_unlock(&mMutex);
3146                goto error_exit;
3147            }
3148            rc = mDummyBatchChannel->initialize(is_type);
3149            if (rc < 0) {
3150                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3151                pthread_mutex_unlock(&mMutex);
3152                goto error_exit;
3153            }
3154        }
3155
3156        // Set bundle info
3157        rc = setBundleInfo();
3158        if (rc < 0) {
3159            ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3160            pthread_mutex_unlock(&mMutex);
3161            goto error_exit;
3162        }
3163
3164        //Then start them.
3165        CDBG_HIGH("%s: Start META Channel", __func__);
3166        rc = mMetadataChannel->start();
3167        if (rc < 0) {
3168            ALOGE("%s: META channel start failed", __func__);
3169            pthread_mutex_unlock(&mMutex);
3170            goto error_exit;
3171        }
3172
3173        if (mAnalysisChannel) {
3174            rc = mAnalysisChannel->start();
3175            if (rc < 0) {
3176                ALOGE("%s: Analysis channel start failed", __func__);
3177                mMetadataChannel->stop();
3178                pthread_mutex_unlock(&mMutex);
3179                goto error_exit;
3180            }
3181        }
3182
3183        if (mSupportChannel) {
3184            rc = mSupportChannel->start();
3185            if (rc < 0) {
3186                ALOGE("%s: Support channel start failed", __func__);
3187                mMetadataChannel->stop();
3188                /* Although support and analysis are mutually exclusive today
3189                   adding it in anycase for future proofing */
3190                if (mAnalysisChannel) {
3191                    mAnalysisChannel->stop();
3192                }
3193                pthread_mutex_unlock(&mMutex);
3194                goto error_exit;
3195            }
3196        }
3197        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3198            it != mStreamInfo.end(); it++) {
3199            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3200            CDBG_HIGH("%s: Start Processing Channel mask=%d",
3201                    __func__, channel->getStreamTypeMask());
3202            rc = channel->start();
3203            if (rc < 0) {
3204                ALOGE("%s: channel start failed", __func__);
3205                pthread_mutex_unlock(&mMutex);
3206                goto error_exit;
3207            }
3208        }
3209
3210        if (mRawDumpChannel) {
3211            CDBG("%s: Starting raw dump stream",__func__);
3212            rc = mRawDumpChannel->start();
3213            if (rc != NO_ERROR) {
3214                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3215                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3216                      it != mStreamInfo.end(); it++) {
3217                    QCamera3Channel *channel =
3218                        (QCamera3Channel *)(*it)->stream->priv;
3219                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3220                        channel->getStreamTypeMask());
3221                    channel->stop();
3222                }
3223                if (mSupportChannel)
3224                    mSupportChannel->stop();
3225                if (mAnalysisChannel) {
3226                    mAnalysisChannel->stop();
3227                }
3228                mMetadataChannel->stop();
3229                pthread_mutex_unlock(&mMutex);
3230                goto error_exit;
3231            }
3232        }
3233
3234        if (mChannelHandle) {
3235
3236            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3237                    mChannelHandle);
3238            if (rc != NO_ERROR) {
3239                ALOGE("%s: start_channel failed %d", __func__, rc);
3240                pthread_mutex_unlock(&mMutex);
3241                goto error_exit;
3242            }
3243        }
3244
3245
3246        goto no_error;
3247error_exit:
3248        m_perfLock.lock_rel();
3249        return rc;
3250no_error:
3251        m_perfLock.lock_rel();
3252
3253        mWokenUpByDaemon = false;
3254        mPendingLiveRequest = 0;
3255        mFirstConfiguration = false;
3256        enablePowerHint();
3257    }
3258
3259    uint32_t frameNumber = request->frame_number;
3260    cam_stream_ID_t streamID;
3261
3262    if (meta.exists(ANDROID_REQUEST_ID)) {
3263        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3264        mCurrentRequestId = request_id;
3265        CDBG("%s: Received request with id: %d",__func__, request_id);
3266    } else if (mFirstRequest || mCurrentRequestId == -1){
3267        ALOGE("%s: Unable to find request id field, \
3268                & no previous id available", __func__);
3269        pthread_mutex_unlock(&mMutex);
3270        return NAME_NOT_FOUND;
3271    } else {
3272        CDBG("%s: Re-using old request id", __func__);
3273        request_id = mCurrentRequestId;
3274    }
3275
3276    CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3277                                    __func__, __LINE__,
3278                                    request->num_output_buffers,
3279                                    request->input_buffer,
3280                                    frameNumber);
3281    // Acquire all request buffers first
3282    streamID.num_streams = 0;
3283    int blob_request = 0;
3284    uint32_t snapshotStreamId = 0;
3285    for (size_t i = 0; i < request->num_output_buffers; i++) {
3286        const camera3_stream_buffer_t& output = request->output_buffers[i];
3287        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3288
3289        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3290            //Call function to store local copy of jpeg data for encode params.
3291            blob_request = 1;
3292            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3293        }
3294
3295        if (output.acquire_fence != -1) {
3296           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3297           close(output.acquire_fence);
3298           if (rc != OK) {
3299              ALOGE("%s: sync wait failed %d", __func__, rc);
3300              pthread_mutex_unlock(&mMutex);
3301              return rc;
3302           }
3303        }
3304
3305        streamID.streamID[streamID.num_streams] =
3306            channel->getStreamID(channel->getStreamTypeMask());
3307        streamID.num_streams++;
3308
3309        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3310            isVidBufRequested = true;
3311        }
3312    }
3313
3314    if (blob_request && mRawDumpChannel) {
3315        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3316        streamID.streamID[streamID.num_streams] =
3317            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3318        streamID.num_streams++;
3319    }
3320
3321    if(request->input_buffer == NULL) {
3322        /* Parse the settings:
3323         * - For every request in NORMAL MODE
3324         * - For every request in HFR mode during preview only case
3325         * - For first request of every batch in HFR mode during video
3326         * recording. In batchmode the same settings except frame number is
3327         * repeated in each request of the batch.
3328         */
3329        if (!mBatchSize ||
3330           (mBatchSize && !isVidBufRequested) ||
3331           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3332            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3333            if (rc < 0) {
3334                ALOGE("%s: fail to set frame parameters", __func__);
3335                pthread_mutex_unlock(&mMutex);
3336                return rc;
3337            }
3338        }
3339        /* For batchMode HFR, setFrameParameters is not called for every
3340         * request. But only frame number of the latest request is parsed */
3341        if (mBatchSize && ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3342                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3343            ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3344            return BAD_VALUE;
3345        }
3346        if (mNeedSensorRestart) {
3347            /* Unlock the mutex as restartSensor waits on the channels to be
3348             * stopped, which in turn calls stream callback functions -
3349             * handleBufferWithLock and handleMetadataWithLock */
3350            pthread_mutex_unlock(&mMutex);
3351            rc = dynamicUpdateMetaStreamInfo();
3352            if (rc != NO_ERROR) {
3353                ALOGE("%s: Restarting the sensor failed", __func__);
3354                return BAD_VALUE;
3355            }
3356            mNeedSensorRestart = false;
3357            pthread_mutex_lock(&mMutex);
3358        }
3359    } else {
3360
3361        if (request->input_buffer->acquire_fence != -1) {
3362           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3363           close(request->input_buffer->acquire_fence);
3364           if (rc != OK) {
3365              ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3366              pthread_mutex_unlock(&mMutex);
3367              return rc;
3368           }
3369        }
3370    }
3371
3372    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3373        mLastCustIntentFrmNum = frameNumber;
3374    }
3375    /* Update pending request list and pending buffers map */
3376    PendingRequestInfo pendingRequest;
3377    pendingRequestIterator latestRequest;
3378    pendingRequest.frame_number = frameNumber;
3379    pendingRequest.num_buffers = request->num_output_buffers;
3380    pendingRequest.request_id = request_id;
3381    pendingRequest.blob_request = blob_request;
3382    pendingRequest.timestamp = 0;
3383    pendingRequest.bUrgentReceived = 0;
3384    if (request->input_buffer) {
3385        pendingRequest.input_buffer =
3386                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3387        *(pendingRequest.input_buffer) = *(request->input_buffer);
3388        pInputBuffer = pendingRequest.input_buffer;
3389    } else {
3390       pendingRequest.input_buffer = NULL;
3391       pInputBuffer = NULL;
3392    }
3393
3394    pendingRequest.pipeline_depth = 0;
3395    pendingRequest.partial_result_cnt = 0;
3396    extractJpegMetadata(mCurJpegMeta, request);
3397    pendingRequest.jpegMetadata = mCurJpegMeta;
3398    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3399
3400    //extract capture intent
3401    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3402        mCaptureIntent =
3403                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3404    }
3405    pendingRequest.capture_intent = mCaptureIntent;
3406
3407    for (size_t i = 0; i < request->num_output_buffers; i++) {
3408        RequestedBufferInfo requestedBuf;
3409        memset(&requestedBuf, 0, sizeof(requestedBuf));
3410        requestedBuf.stream = request->output_buffers[i].stream;
3411        requestedBuf.buffer = NULL;
3412        pendingRequest.buffers.push_back(requestedBuf);
3413
3414        // Add to buffer handle the pending buffers list
3415        PendingBufferInfo bufferInfo;
3416        bufferInfo.frame_number = frameNumber;
3417        bufferInfo.buffer = request->output_buffers[i].buffer;
3418        bufferInfo.stream = request->output_buffers[i].stream;
3419        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3420        mPendingBuffersMap.num_buffers++;
3421        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3422        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3423                __func__, frameNumber, bufferInfo.buffer,
3424                channel->getStreamTypeMask(), bufferInfo.stream->format);
3425    }
3426    latestRequest = mPendingRequestsList.insert(
3427            mPendingRequestsList.end(), pendingRequest);
3428    if(mFlush) {
3429        pthread_mutex_unlock(&mMutex);
3430        return NO_ERROR;
3431    }
3432
3433    // Notify metadata channel we receive a request
3434    mMetadataChannel->request(NULL, frameNumber);
3435
3436    if(request->input_buffer != NULL){
3437        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3438        if (NO_ERROR != rc) {
3439            ALOGE("%s: fail to set reproc parameters", __func__);
3440            pthread_mutex_unlock(&mMutex);
3441            return rc;
3442        }
3443    }
3444
3445    // Call request on other streams
3446    uint32_t streams_need_metadata = 0;
3447    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3448    for (size_t i = 0; i < request->num_output_buffers; i++) {
3449        const camera3_stream_buffer_t& output = request->output_buffers[i];
3450        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3451
3452        if (channel == NULL) {
3453            ALOGE("%s: invalid channel pointer for stream", __func__);
3454            continue;
3455        }
3456
3457        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3458            if(request->input_buffer != NULL){
3459                rc = channel->request(output.buffer, frameNumber,
3460                        pInputBuffer, &mReprocMeta);
3461                if (rc < 0) {
3462                    ALOGE("%s: Fail to request on picture channel", __func__);
3463                    pthread_mutex_unlock(&mMutex);
3464                    return rc;
3465                }
3466            } else {
3467                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3468                        __LINE__, output.buffer, frameNumber);
3469                if (!request->settings) {
3470                    rc = channel->request(output.buffer, frameNumber,
3471                            NULL, mPrevParameters);
3472                } else {
3473                    rc = channel->request(output.buffer, frameNumber,
3474                            NULL, mParameters);
3475                }
3476                if (rc < 0) {
3477                    ALOGE("%s: Fail to request on picture channel", __func__);
3478                    pthread_mutex_unlock(&mMutex);
3479                    return rc;
3480                }
3481                pendingBufferIter->need_metadata = true;
3482                streams_need_metadata++;
3483            }
3484        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3485            bool needMetadata = false;
3486            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3487            rc = yuvChannel->request(output.buffer, frameNumber,
3488                    pInputBuffer,
3489                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3490            if (rc < 0) {
3491                ALOGE("%s: Fail to request on YUV channel", __func__);
3492                pthread_mutex_unlock(&mMutex);
3493                return rc;
3494            }
3495            pendingBufferIter->need_metadata = needMetadata;
3496            if (needMetadata)
3497                streams_need_metadata += 1;
3498            CDBG("%s: calling YUV channel request, need_metadata is %d",
3499                    __func__, needMetadata);
3500        } else {
3501            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3502                __LINE__, output.buffer, frameNumber);
3503            rc = channel->request(output.buffer, frameNumber);
3504            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3505                    && mBatchSize) {
3506                mToBeQueuedVidBufs++;
3507                if (mToBeQueuedVidBufs == mBatchSize) {
3508                    channel->queueBatchBuf();
3509                }
3510            }
3511            if (rc < 0) {
3512                ALOGE("%s: request failed", __func__);
3513                pthread_mutex_unlock(&mMutex);
3514                return rc;
3515            }
3516        }
3517        pendingBufferIter++;
3518    }
3519
3520    //If 2 streams have need_metadata set to true, fail the request, unless
3521    //we copy/reference count the metadata buffer
3522    if (streams_need_metadata > 1) {
3523        ALOGE("%s: not supporting request in which two streams requires"
3524                " 2 HAL metadata for reprocessing", __func__);
3525        pthread_mutex_unlock(&mMutex);
3526        return -EINVAL;
3527    }
3528
3529    if(request->input_buffer == NULL) {
3530        /* Set the parameters to backend:
3531         * - For every request in NORMAL MODE
3532         * - For every request in HFR mode during preview only case
3533         * - Once every batch in HFR mode during video recording
3534         */
3535        if (!mBatchSize ||
3536           (mBatchSize && !isVidBufRequested) ||
3537           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3538            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3539                    __func__, mBatchSize, isVidBufRequested,
3540                    mToBeQueuedVidBufs);
3541            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3542                    mParameters);
3543            if (rc < 0) {
3544                ALOGE("%s: set_parms failed", __func__);
3545            }
3546            /* reset to zero coz, the batch is queued */
3547            mToBeQueuedVidBufs = 0;
3548        }
3549        mPendingLiveRequest++;
3550    }
3551
3552    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3553
3554    mFirstRequest = false;
3555    // Added a timed condition wait
3556    struct timespec ts;
3557    uint8_t isValidTimeout = 1;
3558    rc = clock_gettime(CLOCK_REALTIME, &ts);
3559    if (rc < 0) {
3560      isValidTimeout = 0;
3561      ALOGE("%s: Error reading the real time clock!!", __func__);
3562    }
3563    else {
3564      // Make timeout as 5 sec for request to be honored
3565      ts.tv_sec += 5;
3566    }
3567    //Block on conditional variable
3568    if (mBatchSize) {
3569        /* For HFR, more buffers are dequeued upfront to improve the performance */
3570        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3571        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3572    }
3573    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3574        if (!isValidTimeout) {
3575            CDBG("%s: Blocking on conditional wait", __func__);
3576            pthread_cond_wait(&mRequestCond, &mMutex);
3577        }
3578        else {
3579            CDBG("%s: Blocking on timed conditional wait", __func__);
3580            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3581            if (rc == ETIMEDOUT) {
3582                rc = -ENODEV;
3583                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3584                break;
3585            }
3586        }
3587        CDBG("%s: Unblocked", __func__);
3588        if (mWokenUpByDaemon) {
3589            mWokenUpByDaemon = false;
3590            if (mPendingLiveRequest < maxInFlightRequests)
3591                break;
3592        }
3593    }
3594    pthread_mutex_unlock(&mMutex);
3595
3596    return rc;
3597}
3598
3599/*===========================================================================
3600 * FUNCTION   : dump
3601 *
3602 * DESCRIPTION:
3603 *
3604 * PARAMETERS :
3605 *
3606 *
3607 * RETURN     :
3608 *==========================================================================*/
3609void QCamera3HardwareInterface::dump(int fd)
3610{
3611    pthread_mutex_lock(&mMutex);
3612    dprintf(fd, "\n Camera HAL3 information Begin \n");
3613
3614    dprintf(fd, "\nNumber of pending requests: %zu \n",
3615        mPendingRequestsList.size());
3616    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3617    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3618    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3619    for(pendingRequestIterator i = mPendingRequestsList.begin();
3620            i != mPendingRequestsList.end(); i++) {
3621        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3622        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3623        i->input_buffer);
3624    }
3625    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3626                mPendingBuffersMap.num_buffers);
3627    dprintf(fd, "-------+------------------\n");
3628    dprintf(fd, " Frame | Stream type mask \n");
3629    dprintf(fd, "-------+------------------\n");
3630    for(List<PendingBufferInfo>::iterator i =
3631        mPendingBuffersMap.mPendingBufferList.begin();
3632        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3633        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3634        dprintf(fd, " %5d | %11d \n",
3635                i->frame_number, channel->getStreamTypeMask());
3636    }
3637    dprintf(fd, "-------+------------------\n");
3638
3639    dprintf(fd, "\nPending frame drop list: %zu\n",
3640        mPendingFrameDropList.size());
3641    dprintf(fd, "-------+-----------\n");
3642    dprintf(fd, " Frame | Stream ID \n");
3643    dprintf(fd, "-------+-----------\n");
3644    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3645        i != mPendingFrameDropList.end(); i++) {
3646        dprintf(fd, " %5d | %9d \n",
3647            i->frame_number, i->stream_ID);
3648    }
3649    dprintf(fd, "-------+-----------\n");
3650
3651    dprintf(fd, "\n Camera HAL3 information End \n");
3652
3653    /* use dumpsys media.camera as trigger to send update debug level event */
3654    mUpdateDebugLevel = true;
3655    pthread_mutex_unlock(&mMutex);
3656    return;
3657}
3658
3659/*===========================================================================
3660 * FUNCTION   : flush
3661 *
3662 * DESCRIPTION:
3663 *
3664 * PARAMETERS :
3665 *
3666 *
3667 * RETURN     :
3668 *==========================================================================*/
3669int QCamera3HardwareInterface::flush()
3670{
3671    ATRACE_CALL();
3672    int32_t rc = NO_ERROR;
3673
3674    CDBG("%s: Unblocking Process Capture Request", __func__);
3675    pthread_mutex_lock(&mMutex);
3676    mFlush = true;
3677    pthread_mutex_unlock(&mMutex);
3678
3679    rc = stopAllChannels();
3680    if (rc < 0) {
3681        ALOGE("%s: stopAllChannels failed", __func__);
3682        return rc;
3683    }
3684    if (mChannelHandle) {
3685        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3686                mChannelHandle);
3687    }
3688
3689    // Reset bundle info
3690    rc = setBundleInfo();
3691    if (rc < 0) {
3692        ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3693        return rc;
3694    }
3695
3696    // Mutex Lock
3697    pthread_mutex_lock(&mMutex);
3698
3699    // Unblock process_capture_request
3700    mPendingLiveRequest = 0;
3701    pthread_cond_signal(&mRequestCond);
3702
3703    rc = notifyErrorForPendingRequests();
3704    if (rc < 0) {
3705        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3706        pthread_mutex_unlock(&mMutex);
3707        return rc;
3708    }
3709
3710    mFlush = false;
3711
3712    // Start the Streams/Channels
3713    rc = startAllChannels();
3714    if (rc < 0) {
3715        ALOGE("%s: startAllChannels failed", __func__);
3716        pthread_mutex_unlock(&mMutex);
3717        return rc;
3718    }
3719
3720    if (mChannelHandle) {
3721        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3722                    mChannelHandle);
3723        if (rc < 0) {
3724            ALOGE("%s: start_channel failed", __func__);
3725            pthread_mutex_unlock(&mMutex);
3726            return rc;
3727        }
3728    }
3729
3730    pthread_mutex_unlock(&mMutex);
3731
3732    return 0;
3733}
3734
3735/*===========================================================================
3736 * FUNCTION   : captureResultCb
3737 *
3738 * DESCRIPTION: Callback handler for all capture result
3739 *              (streams, as well as metadata)
3740 *
3741 * PARAMETERS :
3742 *   @metadata : metadata information
3743 *   @buffer   : actual gralloc buffer to be returned to frameworks.
3744 *               NULL if metadata.
3745 *
3746 * RETURN     : NONE
3747 *==========================================================================*/
3748void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3749                camera3_stream_buffer_t *buffer, uint32_t frame_number)
3750{
3751    if (metadata_buf) {
3752        if (mBatchSize) {
3753            handleBatchMetadata(metadata_buf,
3754                    true /* free_and_bufdone_meta_buf */);
3755        } else { /* mBatchSize = 0 */
3756            hdrPlusPerfLock(metadata_buf);
3757            pthread_mutex_lock(&mMutex);
3758            handleMetadataWithLock(metadata_buf,
3759                    true /* free_and_bufdone_meta_buf */);
3760            pthread_mutex_unlock(&mMutex);
3761        }
3762    } else {
3763        pthread_mutex_lock(&mMutex);
3764        handleBufferWithLock(buffer, frame_number);
3765        pthread_mutex_unlock(&mMutex);
3766    }
3767    return;
3768}
3769
3770/*===========================================================================
3771 * FUNCTION   : getReprocessibleOutputStreamId
3772 *
3773 * DESCRIPTION: Get source output stream id for the input reprocess stream
3774 *              based on size and format, which would be the largest
3775 *              output stream if an input stream exists.
3776 *
3777 * PARAMETERS :
3778 *   @id      : return the stream id if found
3779 *
3780 * RETURN     : int32_t type of status
3781 *              NO_ERROR  -- success
3782 *              none-zero failure code
3783 *==========================================================================*/
3784int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
3785{
3786    stream_info_t* stream = NULL;
3787
3788    /* check if any output or bidirectional stream with the same size and format
3789       and return that stream */
3790    if ((mInputStreamInfo.dim.width > 0) &&
3791            (mInputStreamInfo.dim.height > 0)) {
3792        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3793                it != mStreamInfo.end(); it++) {
3794
3795            camera3_stream_t *stream = (*it)->stream;
3796            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
3797                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
3798                    (stream->format == mInputStreamInfo.format)) {
3799                // Usage flag for an input stream and the source output stream
3800                // may be different.
3801                CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
3802                CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
3803                        __func__, stream->usage, mInputStreamInfo.usage);
3804
3805                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
3806                if (channel != NULL && channel->mStreams[0]) {
3807                    id = channel->mStreams[0]->getMyServerID();
3808                    return NO_ERROR;
3809                }
3810            }
3811        }
3812    } else {
3813        CDBG("%s: No input stream, so no reprocessible output stream", __func__);
3814    }
3815    return NAME_NOT_FOUND;
3816}
3817
3818/*===========================================================================
3819 * FUNCTION   : lookupFwkName
3820 *
3821 * DESCRIPTION: In case the enum is not same in fwk and backend
3822 *              make sure the parameter is correctly propogated
3823 *
3824 * PARAMETERS  :
3825 *   @arr      : map between the two enums
3826 *   @len      : len of the map
3827 *   @hal_name : name of the hal_parm to map
3828 *
3829 * RETURN     : int type of status
3830 *              fwk_name  -- success
3831 *              none-zero failure code
3832 *==========================================================================*/
3833template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3834        size_t len, halType hal_name)
3835{
3836
3837    for (size_t i = 0; i < len; i++) {
3838        if (arr[i].hal_name == hal_name) {
3839            return arr[i].fwk_name;
3840        }
3841    }
3842
3843    /* Not able to find matching framework type is not necessarily
3844     * an error case. This happens when mm-camera supports more attributes
3845     * than the frameworks do */
3846    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3847    return NAME_NOT_FOUND;
3848}
3849
3850/*===========================================================================
3851 * FUNCTION   : lookupHalName
3852 *
3853 * DESCRIPTION: In case the enum is not same in fwk and backend
3854 *              make sure the parameter is correctly propogated
3855 *
3856 * PARAMETERS  :
3857 *   @arr      : map between the two enums
3858 *   @len      : len of the map
3859 *   @fwk_name : name of the hal_parm to map
3860 *
3861 * RETURN     : int32_t type of status
3862 *              hal_name  -- success
3863 *              none-zero failure code
3864 *==========================================================================*/
3865template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3866        size_t len, fwkType fwk_name)
3867{
3868    for (size_t i = 0; i < len; i++) {
3869        if (arr[i].fwk_name == fwk_name) {
3870            return arr[i].hal_name;
3871        }
3872    }
3873
3874    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3875    return NAME_NOT_FOUND;
3876}
3877
3878/*===========================================================================
3879 * FUNCTION   : lookupProp
3880 *
3881 * DESCRIPTION: lookup a value by its name
3882 *
3883 * PARAMETERS :
3884 *   @arr     : map between the two enums
3885 *   @len     : size of the map
3886 *   @name    : name to be looked up
3887 *
3888 * RETURN     : Value if found
3889 *              CAM_CDS_MODE_MAX if not found
3890 *==========================================================================*/
3891template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
3892        size_t len, const char *name)
3893{
3894    if (name) {
3895        for (size_t i = 0; i < len; i++) {
3896            if (!strcmp(arr[i].desc, name)) {
3897                return arr[i].val;
3898            }
3899        }
3900    }
3901    return CAM_CDS_MODE_MAX;
3902}
3903
3904/*===========================================================================
3905 *
3906 * DESCRIPTION:
3907 *
3908 * PARAMETERS :
3909 *   @metadata : metadata information from callback
3910 *   @timestamp: metadata buffer timestamp
3911 *   @request_id: request id
3912 *   @jpegMetadata: additional jpeg metadata
3913 *   @pprocDone: whether internal offline postprocsesing is done
3914 *
3915 * RETURN     : camera_metadata_t*
3916 *              metadata in a format specified by fwk
3917 *==========================================================================*/
3918camera_metadata_t*
3919QCamera3HardwareInterface::translateFromHalMetadata(
3920                                 metadata_buffer_t *metadata,
3921                                 nsecs_t timestamp,
3922                                 int32_t request_id,
3923                                 const CameraMetadata& jpegMetadata,
3924                                 uint8_t pipeline_depth,
3925                                 uint8_t capture_intent,
3926                                 bool pprocDone)
3927{
3928    CameraMetadata camMetadata;
3929    camera_metadata_t *resultMetadata;
3930
3931    if (jpegMetadata.entryCount())
3932        camMetadata.append(jpegMetadata);
3933
3934    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
3935    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
3936    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
3937    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
3938
3939    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
3940        int64_t fwk_frame_number = *frame_number;
3941        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
3942    }
3943
3944    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
3945        int32_t fps_range[2];
3946        fps_range[0] = (int32_t)float_range->min_fps;
3947        fps_range[1] = (int32_t)float_range->max_fps;
3948        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3949                                      fps_range, 2);
3950        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
3951            __func__, fps_range[0], fps_range[1]);
3952    }
3953
3954    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
3955        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
3956    }
3957
3958    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
3959        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
3960                METADATA_MAP_SIZE(SCENE_MODES_MAP),
3961                *sceneMode);
3962        if (NAME_NOT_FOUND != val) {
3963            uint8_t fwkSceneMode = (uint8_t)val;
3964            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
3965            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
3966                    __func__, fwkSceneMode);
3967        }
3968    }
3969
3970    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
3971        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
3972        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
3973    }
3974
3975    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
3976        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
3977        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
3978    }
3979
3980    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
3981        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
3982        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
3983    }
3984
3985    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
3986            CAM_INTF_META_EDGE_MODE, metadata) {
3987        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
3988        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
3989    }
3990
3991    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
3992        uint8_t fwk_flashPower = (uint8_t) *flashPower;
3993        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
3994    }
3995
3996    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
3997        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
3998    }
3999
4000    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4001        if (0 <= *flashState) {
4002            uint8_t fwk_flashState = (uint8_t) *flashState;
4003            if (!gCamCapability[mCameraId]->flash_available) {
4004                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4005            }
4006            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4007        }
4008    }
4009
4010    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4011        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4012        if (NAME_NOT_FOUND != val) {
4013            uint8_t fwk_flashMode = (uint8_t)val;
4014            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4015        }
4016    }
4017
4018    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4019        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4020        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4021    }
4022
4023    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4024        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4025    }
4026
4027    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4028        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4029    }
4030
4031    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4032        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4033    }
4034
4035    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4036        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4037        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4038    }
4039
4040    /*EIS is currently not hooked up to the app, so set the mode to OFF*/
4041    uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4042    camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
4043
4044    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4045        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4046        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4047    }
4048
4049    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4050        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4051    }
4052
4053    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4054        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4055
4056        CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4057          blackLevelSourcePattern->cam_black_level[0],
4058          blackLevelSourcePattern->cam_black_level[1],
4059          blackLevelSourcePattern->cam_black_level[2],
4060          blackLevelSourcePattern->cam_black_level[3]);
4061    }
4062
4063    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4064        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4065        float fwk_blackLevelInd[4];
4066
4067        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4068        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4069        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4070        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4071
4072        CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4073          blackLevelAppliedPattern->cam_black_level[0],
4074          blackLevelAppliedPattern->cam_black_level[1],
4075          blackLevelAppliedPattern->cam_black_level[2],
4076          blackLevelAppliedPattern->cam_black_level[3]);
4077        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4078        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4079    }
4080
4081
4082    if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4083        gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4084        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4085        for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4086            opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4087        }
4088        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4089                opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4090    }
4091
4092    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4093            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4094        int32_t scalerCropRegion[4];
4095        scalerCropRegion[0] = hScalerCropRegion->left;
4096        scalerCropRegion[1] = hScalerCropRegion->top;
4097        scalerCropRegion[2] = hScalerCropRegion->width;
4098        scalerCropRegion[3] = hScalerCropRegion->height;
4099
4100        // Adjust crop region from sensor output coordinate system to active
4101        // array coordinate system.
4102        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4103                scalerCropRegion[2], scalerCropRegion[3]);
4104
4105        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4106    }
4107
4108    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4109        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4110        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4111    }
4112
4113    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4114            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4115        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4116        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4117    }
4118
4119    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4120            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4121        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4122        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4123                sensorRollingShutterSkew, 1);
4124    }
4125
4126    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4127        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4128        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4129
4130        //calculate the noise profile based on sensitivity
4131        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4132        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4133        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4134        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4135            noise_profile[i]   = noise_profile_S;
4136            noise_profile[i+1] = noise_profile_O;
4137        }
4138        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4139                noise_profile_S, noise_profile_O);
4140        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4141                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4142    }
4143
4144    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4145        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4146        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4147    }
4148
4149    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4150        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4151                *faceDetectMode);
4152        if (NAME_NOT_FOUND != val) {
4153            uint8_t fwk_faceDetectMode = (uint8_t)val;
4154            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4155
4156            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4157                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4158                        CAM_INTF_META_FACE_DETECTION, metadata) {
4159                    uint8_t numFaces = MIN(
4160                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4161                    int32_t faceIds[MAX_ROI];
4162                    uint8_t faceScores[MAX_ROI];
4163                    int32_t faceRectangles[MAX_ROI * 4];
4164                    int32_t faceLandmarks[MAX_ROI * 6];
4165                    size_t j = 0, k = 0;
4166
4167                    for (size_t i = 0; i < numFaces; i++) {
4168                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4169                        // Adjust crop region from sensor output coordinate system to active
4170                        // array coordinate system.
4171                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4172                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4173                                rect.width, rect.height);
4174
4175                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4176                                faceRectangles+j, -1);
4177
4178                        // Map the co-ordinate sensor output coordinate system to active
4179                        // array coordinate system.
4180                        cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4181                        mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4182                                face.left_eye_center.y);
4183                        mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4184                                face.right_eye_center.y);
4185                        mCropRegionMapper.toActiveArray(face.mouth_center.x,
4186                                face.mouth_center.y);
4187
4188                        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4189                        j+= 4;
4190                        k+= 6;
4191                    }
4192                    if (numFaces <= 0) {
4193                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4194                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4195                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4196                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4197                    }
4198
4199                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4200                            numFaces);
4201                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4202                            faceRectangles, numFaces * 4U);
4203                    if (fwk_faceDetectMode ==
4204                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4205                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4206                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4207                                faceLandmarks, numFaces * 6U);
4208                   }
4209                }
4210            }
4211        }
4212    }
4213
4214    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4215        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4216        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4217    }
4218
4219    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4220            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4221        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4222        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4223    }
4224
4225    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4226            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4227        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4228                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4229    }
4230
4231    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4232            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4233        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4234                CAM_MAX_SHADING_MAP_HEIGHT);
4235        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4236                CAM_MAX_SHADING_MAP_WIDTH);
4237        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4238                lensShadingMap->lens_shading, 4U * map_width * map_height);
4239    }
4240
4241    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4242        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4243        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4244    }
4245
4246    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4247        //Populate CAM_INTF_META_TONEMAP_CURVES
4248        /* ch0 = G, ch 1 = B, ch 2 = R*/
4249        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4250            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4251                    __func__, tonemap->tonemap_points_cnt,
4252                    CAM_MAX_TONEMAP_CURVE_SIZE);
4253            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4254        }
4255
4256        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4257                        &tonemap->curves[0].tonemap_points[0][0],
4258                        tonemap->tonemap_points_cnt * 2);
4259
4260        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4261                        &tonemap->curves[1].tonemap_points[0][0],
4262                        tonemap->tonemap_points_cnt * 2);
4263
4264        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4265                        &tonemap->curves[2].tonemap_points[0][0],
4266                        tonemap->tonemap_points_cnt * 2);
4267    }
4268
4269    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4270            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4271        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4272                CC_GAINS_COUNT);
4273    }
4274
4275    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4276            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4277        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4278                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4279                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4280    }
4281
4282    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4283            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4284        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4285            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4286                    __func__, toneCurve->tonemap_points_cnt,
4287                    CAM_MAX_TONEMAP_CURVE_SIZE);
4288            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4289        }
4290        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4291                (float*)toneCurve->curve.tonemap_points,
4292                toneCurve->tonemap_points_cnt * 2);
4293    }
4294
4295    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4296            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4297        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4298                predColorCorrectionGains->gains, 4);
4299    }
4300
4301    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4302            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4303        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4304                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4305                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4306    }
4307
4308    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4309        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4310    }
4311
4312    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4313        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4314        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4315    }
4316
4317    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4318        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4319        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4320    }
4321
4322    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4323        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4324                *effectMode);
4325        if (NAME_NOT_FOUND != val) {
4326            uint8_t fwk_effectMode = (uint8_t)val;
4327            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4328        }
4329    }
4330
4331    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4332            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4333        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4334                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4335        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4336            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4337        }
4338        int32_t fwk_testPatternData[4];
4339        fwk_testPatternData[0] = testPatternData->r;
4340        fwk_testPatternData[3] = testPatternData->b;
4341        switch (gCamCapability[mCameraId]->color_arrangement) {
4342        case CAM_FILTER_ARRANGEMENT_RGGB:
4343        case CAM_FILTER_ARRANGEMENT_GRBG:
4344            fwk_testPatternData[1] = testPatternData->gr;
4345            fwk_testPatternData[2] = testPatternData->gb;
4346            break;
4347        case CAM_FILTER_ARRANGEMENT_GBRG:
4348        case CAM_FILTER_ARRANGEMENT_BGGR:
4349            fwk_testPatternData[2] = testPatternData->gr;
4350            fwk_testPatternData[1] = testPatternData->gb;
4351            break;
4352        default:
4353            ALOGE("%s: color arrangement %d is not supported", __func__,
4354                gCamCapability[mCameraId]->color_arrangement);
4355            break;
4356        }
4357        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4358    }
4359
4360    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4361        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4362    }
4363
4364    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4365        String8 str((const char *)gps_methods);
4366        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4367    }
4368
4369    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4370        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4371    }
4372
4373    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4374        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4375    }
4376
4377    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4378        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4379        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4380    }
4381
4382    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4383        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4384        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4385    }
4386
4387    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4388        int32_t fwk_thumb_size[2];
4389        fwk_thumb_size[0] = thumb_size->width;
4390        fwk_thumb_size[1] = thumb_size->height;
4391        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4392    }
4393
4394    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4395        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4396                privateData,
4397                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4398    }
4399
4400    if (metadata->is_tuning_params_valid) {
4401        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4402        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4403        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4404
4405
4406        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4407                sizeof(uint32_t));
4408        data += sizeof(uint32_t);
4409
4410        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4411                sizeof(uint32_t));
4412        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4413        data += sizeof(uint32_t);
4414
4415        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4416                sizeof(uint32_t));
4417        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4418        data += sizeof(uint32_t);
4419
4420        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4421                sizeof(uint32_t));
4422        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4423        data += sizeof(uint32_t);
4424
4425        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4426                sizeof(uint32_t));
4427        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4428        data += sizeof(uint32_t);
4429
4430        metadata->tuning_params.tuning_mod3_data_size = 0;
4431        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4432                sizeof(uint32_t));
4433        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4434        data += sizeof(uint32_t);
4435
4436        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4437                TUNING_SENSOR_DATA_MAX);
4438        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4439                count);
4440        data += count;
4441
4442        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4443                TUNING_VFE_DATA_MAX);
4444        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4445                count);
4446        data += count;
4447
4448        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4449                TUNING_CPP_DATA_MAX);
4450        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4451                count);
4452        data += count;
4453
4454        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4455                TUNING_CAC_DATA_MAX);
4456        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4457                count);
4458        data += count;
4459
4460        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4461                (int32_t *)(void *)tuning_meta_data_blob,
4462                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4463    }
4464
4465    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4466            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4467        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4468                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4469                NEUTRAL_COL_POINTS);
4470    }
4471
4472    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4473        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4474        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4475    }
4476
4477    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4478        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4479        // Adjust crop region from sensor output coordinate system to active
4480        // array coordinate system.
4481        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4482                hAeRegions->rect.width, hAeRegions->rect.height);
4483
4484        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4485        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4486                REGIONS_TUPLE_COUNT);
4487        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4488                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4489                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4490                hAeRegions->rect.height);
4491    }
4492
4493    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4494        uint8_t fwk_afState = (uint8_t) *afState;
4495        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4496        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4497    }
4498
4499    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4500        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4501    }
4502
4503    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4504        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4505    }
4506
4507    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4508        uint8_t fwk_lensState = *lensState;
4509        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4510    }
4511
4512    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4513        /*af regions*/
4514        int32_t afRegions[REGIONS_TUPLE_COUNT];
4515        // Adjust crop region from sensor output coordinate system to active
4516        // array coordinate system.
4517        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4518                hAfRegions->rect.width, hAfRegions->rect.height);
4519
4520        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4521        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4522                REGIONS_TUPLE_COUNT);
4523        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4524                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4525                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4526                hAfRegions->rect.height);
4527    }
4528
4529    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4530        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4531                *hal_ab_mode);
4532        if (NAME_NOT_FOUND != val) {
4533            uint8_t fwk_ab_mode = (uint8_t)val;
4534            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4535        }
4536    }
4537
4538    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4539        int val = lookupFwkName(SCENE_MODES_MAP,
4540                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4541        if (NAME_NOT_FOUND != val) {
4542            uint8_t fwkBestshotMode = (uint8_t)val;
4543            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4544            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4545        } else {
4546            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4547        }
4548    }
4549
4550    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4551         uint8_t fwk_mode = (uint8_t) *mode;
4552         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4553    }
4554
4555    /* Constant metadata values to be update*/
4556    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4557    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4558
4559    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4560    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4561
4562    int32_t hotPixelMap[2];
4563    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4564
4565    // CDS
4566    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4567        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4568    }
4569
4570    // TNR
4571    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4572        uint8_t tnr_enable       = tnr->denoise_enable;
4573        int32_t tnr_process_type = (int32_t)tnr->process_plates;
4574
4575        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4576        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4577    }
4578
4579    // Reprocess crop data
4580    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4581        uint8_t cnt = crop_data->num_of_streams;
4582        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4583            // mm-qcamera-daemon only posts crop_data for streams
4584            // not linked to pproc. So no valid crop metadata is not
4585            // necessarily an error case.
4586            CDBG("%s: No valid crop metadata entries", __func__);
4587        } else {
4588            uint32_t reproc_stream_id;
4589            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4590                CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4591            } else {
4592                int rc = NO_ERROR;
4593                Vector<int32_t> roi_map;
4594                int32_t *crop = new int32_t[cnt*4];
4595                if (NULL == crop) {
4596                   rc = NO_MEMORY;
4597                }
4598                if (NO_ERROR == rc) {
4599                    int32_t streams_found = 0;
4600                    for (size_t i = 0; i < cnt; i++) {
4601                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4602                            if (pprocDone) {
4603                                // HAL already does internal reprocessing,
4604                                // either via reprocessing before JPEG encoding,
4605                                // or offline postprocessing for pproc bypass case.
4606                                crop[0] = 0;
4607                                crop[1] = 0;
4608                                crop[2] = mInputStreamInfo.dim.width;
4609                                crop[3] = mInputStreamInfo.dim.height;
4610                            } else {
4611                                crop[0] = crop_data->crop_info[i].crop.left;
4612                                crop[1] = crop_data->crop_info[i].crop.top;
4613                                crop[2] = crop_data->crop_info[i].crop.width;
4614                                crop[3] = crop_data->crop_info[i].crop.height;
4615                            }
4616                            roi_map.add(crop_data->crop_info[i].roi_map.left);
4617                            roi_map.add(crop_data->crop_info[i].roi_map.top);
4618                            roi_map.add(crop_data->crop_info[i].roi_map.width);
4619                            roi_map.add(crop_data->crop_info[i].roi_map.height);
4620                            streams_found++;
4621                            CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4622                                    __func__,
4623                                    crop[0], crop[1], crop[2], crop[3]);
4624                            CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4625                                    __func__,
4626                                    crop_data->crop_info[i].roi_map.left,
4627                                    crop_data->crop_info[i].roi_map.top,
4628                                    crop_data->crop_info[i].roi_map.width,
4629                                    crop_data->crop_info[i].roi_map.height);
4630                            break;
4631
4632                       }
4633                    }
4634                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4635                            &streams_found, 1);
4636                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
4637                            crop, (size_t)(streams_found * 4));
4638                    if (roi_map.array()) {
4639                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4640                                roi_map.array(), roi_map.size());
4641                    }
4642               }
4643               if (crop) {
4644                   delete [] crop;
4645               }
4646            }
4647        }
4648    }
4649
4650    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4651        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4652                *cacMode);
4653        if (NAME_NOT_FOUND != val) {
4654            uint8_t fwkCacMode = (uint8_t)val;
4655            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4656        } else {
4657            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4658        }
4659    }
4660
4661    // Post blob of cam_cds_data through vendor tag.
4662    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4663        uint8_t cnt = cdsInfo->num_of_streams;
4664        cam_cds_data_t cdsDataOverride;
4665        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4666        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4667        cdsDataOverride.num_of_streams = 1;
4668        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4669            uint32_t reproc_stream_id;
4670            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4671                CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4672            } else {
4673                for (size_t i = 0; i < cnt; i++) {
4674                    if (cdsInfo->cds_info[i].stream_id ==
4675                            reproc_stream_id) {
4676                        cdsDataOverride.cds_info[0].cds_enable =
4677                                cdsInfo->cds_info[i].cds_enable;
4678                        break;
4679                    }
4680                }
4681            }
4682        } else {
4683            CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
4684        }
4685        camMetadata.update(QCAMERA3_CDS_INFO,
4686                (uint8_t *)&cdsDataOverride,
4687                sizeof(cam_cds_data_t));
4688    }
4689
4690    // Ldaf calibration data
4691    if (!mLdafCalibExist) {
4692        IF_META_AVAILABLE(uint32_t, ldafCalib,
4693                CAM_INTF_META_LDAF_EXIF, metadata) {
4694            mLdafCalibExist = true;
4695            mLdafCalib[0] = ldafCalib[0];
4696            mLdafCalib[1] = ldafCalib[1];
4697            CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
4698                    ldafCalib[0], ldafCalib[1]);
4699        }
4700    }
4701
4702    resultMetadata = camMetadata.release();
4703    return resultMetadata;
4704}
4705
4706/*===========================================================================
4707 * FUNCTION   : saveExifParams
4708 *
4709 * DESCRIPTION:
4710 *
4711 * PARAMETERS :
4712 *   @metadata : metadata information from callback
4713 *
4714 * RETURN     : none
4715 *
4716 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Copy each 3A/stats EXIF debug payload out of the metadata buffer
    // (when present) into mExifParams, and flag it valid. The cached
    // params are later returned by get3AExifParams(); presumably they end
    // up in the JPEG EXIF debug data — confirm against the JPEG encoder.

    // Auto-exposure debug data
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        mExifParams.ae_debug_params = *ae_exif_debug_params;
        mExifParams.ae_debug_params_valid = TRUE;
    }
    // Auto-white-balance debug data
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        mExifParams.awb_debug_params = *awb_exif_debug_params;
        mExifParams.awb_debug_params_valid = TRUE;
    }
    // Auto-focus debug data
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        mExifParams.af_debug_params = *af_exif_debug_params;
        mExifParams.af_debug_params_valid = TRUE;
    }
    // Auto-scene-detection debug data
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        mExifParams.asd_debug_params = *asd_exif_debug_params;
        mExifParams.asd_debug_params_valid = TRUE;
    }
    // Stats buffer debug data
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        mExifParams.stats_debug_params = *stats_exif_debug_params;
        mExifParams.stats_debug_params_valid = TRUE;
    }
}
4745
4746/*===========================================================================
4747 * FUNCTION   : get3AExifParams
4748 *
4749 * DESCRIPTION:
4750 *
4751 * PARAMETERS : none
4752 *
4753 *
4754 * RETURN     : mm_jpeg_exif_params_t
4755 *
4756 *==========================================================================*/
4757mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
4758{
4759    return mExifParams;
4760}
4761
4762/*===========================================================================
4763 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4764 *
4765 * DESCRIPTION:
4766 *
4767 * PARAMETERS :
4768 *   @metadata : metadata information from callback
4769 *
4770 * RETURN     : camera_metadata_t*
4771 *              metadata in a format specified by fwk
4772 *==========================================================================*/
4773camera_metadata_t*
4774QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
4775                                (metadata_buffer_t *metadata)
4776{
4777    CameraMetadata camMetadata;
4778    camera_metadata_t *resultMetadata;
4779
4780
4781    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
4782        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
4783        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
4784        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
4785    }
4786
4787    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
4788        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
4789                &aecTrigger->trigger, 1);
4790        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
4791                &aecTrigger->trigger_id, 1);
4792        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
4793                __func__, aecTrigger->trigger);
4794        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
4795                aecTrigger->trigger_id);
4796    }
4797
4798    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
4799        uint8_t fwk_ae_state = (uint8_t) *ae_state;
4800        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
4801        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
4802    }
4803
4804    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4805        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4806        if (NAME_NOT_FOUND != val) {
4807            uint8_t fwkAfMode = (uint8_t)val;
4808            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4809            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
4810        } else {
4811            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
4812                    val);
4813        }
4814    }
4815
4816    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
4817        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
4818                &af_trigger->trigger, 1);
4819        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
4820                __func__, af_trigger->trigger);
4821        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
4822        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
4823                af_trigger->trigger_id);
4824    }
4825
4826    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
4827        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4828                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
4829        if (NAME_NOT_FOUND != val) {
4830            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
4831            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
4832            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
4833        } else {
4834            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
4835        }
4836    }
4837
4838    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4839    uint32_t aeMode = CAM_AE_MODE_MAX;
4840    int32_t flashMode = CAM_FLASH_MODE_MAX;
4841    int32_t redeye = -1;
4842    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
4843        aeMode = *pAeMode;
4844    }
4845    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
4846        flashMode = *pFlashMode;
4847    }
4848    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
4849        redeye = *pRedeye;
4850    }
4851
4852    if (1 == redeye) {
4853        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
4854        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4855    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
4856        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
4857                flashMode);
4858        if (NAME_NOT_FOUND != val) {
4859            fwk_aeMode = (uint8_t)val;
4860            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4861        } else {
4862            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
4863        }
4864    } else if (aeMode == CAM_AE_MODE_ON) {
4865        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
4866        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4867    } else if (aeMode == CAM_AE_MODE_OFF) {
4868        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4869        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4870    } else {
4871        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
4872              "flashMode:%d, aeMode:%u!!!",
4873                __func__, redeye, flashMode, aeMode);
4874    }
4875
4876    resultMetadata = camMetadata.release();
4877    return resultMetadata;
4878}
4879
4880/*===========================================================================
4881 * FUNCTION   : dumpMetadataToFile
4882 *
4883 * DESCRIPTION: Dumps tuning metadata to file system
4884 *
4885 * PARAMETERS :
4886 *   @meta           : tuning metadata
4887 *   @dumpFrameCount : current dump frame count
4888 *   @enabled        : Enable mask
4889 *
4890 *==========================================================================*/
4891void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
4892                                                   uint32_t &dumpFrameCount,
4893                                                   bool enabled,
4894                                                   const char *type,
4895                                                   uint32_t frameNumber)
4896{
4897    uint32_t frm_num = 0;
4898
4899    //Some sanity checks
4900    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
4901        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
4902              __func__,
4903              meta.tuning_sensor_data_size,
4904              TUNING_SENSOR_DATA_MAX);
4905        return;
4906    }
4907
4908    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
4909        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
4910              __func__,
4911              meta.tuning_vfe_data_size,
4912              TUNING_VFE_DATA_MAX);
4913        return;
4914    }
4915
4916    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
4917        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
4918              __func__,
4919              meta.tuning_cpp_data_size,
4920              TUNING_CPP_DATA_MAX);
4921        return;
4922    }
4923
4924    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
4925        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
4926              __func__,
4927              meta.tuning_cac_data_size,
4928              TUNING_CAC_DATA_MAX);
4929        return;
4930    }
4931    //
4932
4933    if(enabled){
4934        char timeBuf[FILENAME_MAX];
4935        char buf[FILENAME_MAX];
4936        memset(buf, 0, sizeof(buf));
4937        memset(timeBuf, 0, sizeof(timeBuf));
4938        time_t current_time;
4939        struct tm * timeinfo;
4940        time (&current_time);
4941        timeinfo = localtime (&current_time);
4942        if (timeinfo != NULL) {
4943            strftime (timeBuf, sizeof(timeBuf),
4944                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
4945        }
4946        String8 filePath(timeBuf);
4947        snprintf(buf,
4948                sizeof(buf),
4949                "%dm_%s_%d.bin",
4950                dumpFrameCount,
4951                type,
4952                frameNumber);
4953        filePath.append(buf);
4954        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
4955        if (file_fd >= 0) {
4956            ssize_t written_len = 0;
4957            meta.tuning_data_version = TUNING_DATA_VERSION;
4958            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
4959            written_len += write(file_fd, data, sizeof(uint32_t));
4960            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
4961            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4962            written_len += write(file_fd, data, sizeof(uint32_t));
4963            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
4964            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4965            written_len += write(file_fd, data, sizeof(uint32_t));
4966            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
4967            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4968            written_len += write(file_fd, data, sizeof(uint32_t));
4969            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
4970            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4971            written_len += write(file_fd, data, sizeof(uint32_t));
4972            meta.tuning_mod3_data_size = 0;
4973            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
4974            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4975            written_len += write(file_fd, data, sizeof(uint32_t));
4976            size_t total_size = meta.tuning_sensor_data_size;
4977            data = (void *)((uint8_t *)&meta.data);
4978            written_len += write(file_fd, data, total_size);
4979            total_size = meta.tuning_vfe_data_size;
4980            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
4981            written_len += write(file_fd, data, total_size);
4982            total_size = meta.tuning_cpp_data_size;
4983            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
4984            written_len += write(file_fd, data, total_size);
4985            total_size = meta.tuning_cac_data_size;
4986            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
4987            written_len += write(file_fd, data, total_size);
4988            close(file_fd);
4989        }else {
4990            ALOGE("%s: fail to open file for metadata dumping", __func__);
4991        }
4992    }
4993}
4994
4995/*===========================================================================
4996 * FUNCTION   : cleanAndSortStreamInfo
4997 *
4998 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
4999 *              and sort them such that raw stream is at the end of the list
5000 *              This is a workaround for camera daemon constraint.
5001 *
5002 * PARAMETERS : None
5003 *
5004 *==========================================================================*/
5005void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5006{
5007    List<stream_info_t *> newStreamInfo;
5008
5009    /*clean up invalid streams*/
5010    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5011            it != mStreamInfo.end();) {
5012        if(((*it)->status) == INVALID){
5013            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5014            delete channel;
5015            free(*it);
5016            it = mStreamInfo.erase(it);
5017        } else {
5018            it++;
5019        }
5020    }
5021
5022    // Move preview/video/callback/snapshot streams into newList
5023    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5024            it != mStreamInfo.end();) {
5025        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5026                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5027                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5028            newStreamInfo.push_back(*it);
5029            it = mStreamInfo.erase(it);
5030        } else
5031            it++;
5032    }
5033    // Move raw streams into newList
5034    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5035            it != mStreamInfo.end();) {
5036        newStreamInfo.push_back(*it);
5037        it = mStreamInfo.erase(it);
5038    }
5039
5040    mStreamInfo = newStreamInfo;
5041}
5042
5043/*===========================================================================
5044 * FUNCTION   : extractJpegMetadata
5045 *
5046 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5047 *              JPEG metadata is cached in HAL, and return as part of capture
5048 *              result when metadata is returned from camera daemon.
5049 *
5050 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5051 *              @request:      capture request
5052 *
5053 *==========================================================================*/
5054void QCamera3HardwareInterface::extractJpegMetadata(
5055        CameraMetadata& jpegMetadata,
5056        const camera3_capture_request_t *request)
5057{
5058    CameraMetadata frame_settings;
5059    frame_settings = request->settings;
5060
5061    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5062        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5063                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5064                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5065
5066    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5067        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5068                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5069                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5070
5071    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5072        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5073                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5074                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5075
5076    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5077        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5078                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5079                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5080
5081    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5082        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5083                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5084                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5085
5086    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5087        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5088                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5089                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5090
5091    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5092        int32_t thumbnail_size[2];
5093        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5094        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5095        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5096            int32_t orientation =
5097                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5098            if ((orientation == 90) || (orientation == 270)) {
5099               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5100               int32_t temp;
5101               temp = thumbnail_size[0];
5102               thumbnail_size[0] = thumbnail_size[1];
5103               thumbnail_size[1] = temp;
5104            }
5105         }
5106         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5107                thumbnail_size,
5108                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5109    }
5110
5111}
5112
5113/*===========================================================================
5114 * FUNCTION   : convertToRegions
5115 *
5116 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5117 *
5118 * PARAMETERS :
5119 *   @rect   : cam_rect_t struct to convert
5120 *   @region : int32_t destination array
5121 *   @weight : if we are converting from cam_area_t, weight is valid
5122 *             else weight = -1
5123 *
5124 *==========================================================================*/
5125void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5126        int32_t *region, int weight)
5127{
5128    region[0] = rect.left;
5129    region[1] = rect.top;
5130    region[2] = rect.left + rect.width;
5131    region[3] = rect.top + rect.height;
5132    if (weight > -1) {
5133        region[4] = weight;
5134    }
5135}
5136
5137/*===========================================================================
5138 * FUNCTION   : convertFromRegions
5139 *
5140 * DESCRIPTION: helper method to convert from array to cam_rect_t
5141 *
5142 * PARAMETERS :
5143 *   @rect   : cam_rect_t struct to convert
5144 *   @region : int32_t destination array
5145 *   @weight : if we are converting from cam_area_t, weight is valid
5146 *             else weight = -1
5147 *
5148 *==========================================================================*/
5149void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5150        const camera_metadata_t *settings, uint32_t tag)
5151{
5152    CameraMetadata frame_settings;
5153    frame_settings = settings;
5154    int32_t x_min = frame_settings.find(tag).data.i32[0];
5155    int32_t y_min = frame_settings.find(tag).data.i32[1];
5156    int32_t x_max = frame_settings.find(tag).data.i32[2];
5157    int32_t y_max = frame_settings.find(tag).data.i32[3];
5158    roi.weight = frame_settings.find(tag).data.i32[4];
5159    roi.rect.left = x_min;
5160    roi.rect.top = y_min;
5161    roi.rect.width = x_max - x_min;
5162    roi.rect.height = y_max - y_min;
5163}
5164
5165/*===========================================================================
5166 * FUNCTION   : resetIfNeededROI
5167 *
5168 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5169 *              crop region
5170 *
5171 * PARAMETERS :
5172 *   @roi       : cam_area_t struct to resize
5173 *   @scalerCropRegion : cam_crop_region_t region to compare against
5174 *
5175 *
5176 *==========================================================================*/
5177bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5178                                                 const cam_crop_region_t* scalerCropRegion)
5179{
5180    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5181    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5182    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5183    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5184
5185    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5186     * without having this check the calculations below to validate if the roi
5187     * is inside scalar crop region will fail resulting in the roi not being
5188     * reset causing algorithm to continue to use stale roi window
5189     */
5190    if (roi->weight == 0) {
5191        return true;
5192    }
5193
5194    if ((roi_x_max < scalerCropRegion->left) ||
5195        // right edge of roi window is left of scalar crop's left edge
5196        (roi_y_max < scalerCropRegion->top)  ||
5197        // bottom edge of roi window is above scalar crop's top edge
5198        (roi->rect.left > crop_x_max) ||
5199        // left edge of roi window is beyond(right) of scalar crop's right edge
5200        (roi->rect.top > crop_y_max)){
5201        // top edge of roi windo is above scalar crop's top edge
5202        return false;
5203    }
5204    if (roi->rect.left < scalerCropRegion->left) {
5205        roi->rect.left = scalerCropRegion->left;
5206    }
5207    if (roi->rect.top < scalerCropRegion->top) {
5208        roi->rect.top = scalerCropRegion->top;
5209    }
5210    if (roi_x_max > crop_x_max) {
5211        roi_x_max = crop_x_max;
5212    }
5213    if (roi_y_max > crop_y_max) {
5214        roi_y_max = crop_y_max;
5215    }
5216    roi->rect.width = roi_x_max - roi->rect.left;
5217    roi->rect.height = roi_y_max - roi->rect.top;
5218    return true;
5219}
5220
5221/*===========================================================================
5222 * FUNCTION   : convertLandmarks
5223 *
5224 * DESCRIPTION: helper method to extract the landmarks from face detection info
5225 *
5226 * PARAMETERS :
5227 *   @face   : cam_rect_t struct to convert
5228 *   @landmarks : int32_t destination array
5229 *
5230 *
5231 *==========================================================================*/
5232void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5233{
5234    landmarks[0] = (int32_t)face.left_eye_center.x;
5235    landmarks[1] = (int32_t)face.left_eye_center.y;
5236    landmarks[2] = (int32_t)face.right_eye_center.x;
5237    landmarks[3] = (int32_t)face.right_eye_center.y;
5238    landmarks[4] = (int32_t)face.mouth_center.x;
5239    landmarks[5] = (int32_t)face.mouth_center.y;
5240}
5241
5242#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5243/*===========================================================================
5244 * FUNCTION   : initCapabilities
5245 *
5246 * DESCRIPTION: initialize camera capabilities in static data struct
5247 *
5248 * PARAMETERS :
5249 *   @cameraId  : camera Id
5250 *
5251 * RETURN     : int32_t type of status
5252 *              NO_ERROR  -- success
5253 *              none-zero failure code
5254 *==========================================================================*/
int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
{
    int rc = 0;
    mm_camera_vtbl_t *cameraHandle = NULL;
    QCamera3HeapMemory *capabilityHeap = NULL;

    // Open a short-lived backend session purely to query capabilities; it is
    // closed again before returning, and the result is cached in
    // gCamCapability[cameraId].
    rc = camera_open((uint8_t)cameraId, &cameraHandle);
    if (rc || !cameraHandle) {
        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
        goto open_failed;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        ALOGE("%s: creation of capabilityHeap failed", __func__);
        goto heap_creation_failed;
    }
    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        ALOGE("%s: No memory for cappability", __func__);
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // The daemon fills this shared buffer in response to query_capability.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
                                capabilityHeap->getFd(0),
                                sizeof(cam_capability_t));
    if(rc < 0) {
        ALOGE("%s: failed to map capability buffer", __func__);
        goto map_failed;
    }

    /* Query Capability */
    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
    if(rc < 0) {
        ALOGE("%s: failed to query capability",__func__);
        goto query_failed;
    }
    // Copy the daemon-provided capabilities into a heap-owned global entry;
    // note malloc failure falls through to cleanup with rc still set from
    // query_capability (0), so gCamCapability[cameraId] stays NULL.
    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (!gCamCapability[cameraId]) {
        ALOGE("%s: out of memory", __func__);
        goto query_failed;
    }
    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                        sizeof(cam_capability_t));
    rc = 0;

    // Cleanup ladder: labels run in reverse order of acquisition, so each
    // failure point releases exactly the resources acquired before it.
query_failed:
    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;
heap_creation_failed:
    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
    cameraHandle = NULL;
open_failed:
    return rc;
}
5318
5319/*==========================================================================
5320 * FUNCTION   : get3Aversion
5321 *
5322 * DESCRIPTION: get the Q3A S/W version
5323 *
5324 * PARAMETERS :
5325 *  @sw_version: Reference of Q3A structure which will hold version info upon
5326 *               return
5327 *
5328 * RETURN     : None
5329 *
5330 *==========================================================================*/
5331void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5332{
5333    if(gCamCapability[mCameraId])
5334        sw_version = gCamCapability[mCameraId]->q3a_version;
5335    else
5336        ALOGE("%s:Capability structure NULL!", __func__);
5337}
5338
5339
5340/*===========================================================================
5341 * FUNCTION   : initParameters
5342 *
5343 * DESCRIPTION: initialize camera parameters
5344 *
5345 * PARAMETERS :
5346 *
5347 * RETURN     : int32_t type of status
5348 *              NO_ERROR  -- success
5349 *              none-zero failure code
5350 *==========================================================================*/
5351int QCamera3HardwareInterface::initParameters()
5352{
5353    int rc = 0;
5354
5355    //Allocate Set Param Buffer
5356    mParamHeap = new QCamera3HeapMemory(1);
5357    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5358    if(rc != OK) {
5359        rc = NO_MEMORY;
5360        ALOGE("Failed to allocate SETPARM Heap memory");
5361        delete mParamHeap;
5362        mParamHeap = NULL;
5363        return rc;
5364    }
5365
5366    //Map memory for parameters buffer
5367    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5368            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5369            mParamHeap->getFd(0),
5370            sizeof(metadata_buffer_t));
5371    if(rc < 0) {
5372        ALOGE("%s:failed to map SETPARM buffer",__func__);
5373        rc = FAILED_TRANSACTION;
5374        mParamHeap->deallocate();
5375        delete mParamHeap;
5376        mParamHeap = NULL;
5377        return rc;
5378    }
5379
5380    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5381
5382    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5383    return rc;
5384}
5385
5386/*===========================================================================
5387 * FUNCTION   : deinitParameters
5388 *
5389 * DESCRIPTION: de-initialize camera parameters
5390 *
5391 * PARAMETERS :
5392 *
5393 * RETURN     : NONE
5394 *==========================================================================*/
5395void QCamera3HardwareInterface::deinitParameters()
5396{
5397    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
5398            CAM_MAPPING_BUF_TYPE_PARM_BUF);
5399
5400    mParamHeap->deallocate();
5401    delete mParamHeap;
5402    mParamHeap = NULL;
5403
5404    mParameters = NULL;
5405
5406    free(mPrevParameters);
5407    mPrevParameters = NULL;
5408}
5409
5410/*===========================================================================
5411 * FUNCTION   : calcMaxJpegSize
5412 *
5413 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5414 *
5415 * PARAMETERS :
5416 *
5417 * RETURN     : max_jpeg_size
5418 *==========================================================================*/
5419size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5420{
5421    size_t max_jpeg_size = 0;
5422    size_t temp_width, temp_height;
5423    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5424            MAX_SIZES_CNT);
5425    for (size_t i = 0; i < count; i++) {
5426        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5427        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5428        if (temp_width * temp_height > max_jpeg_size ) {
5429            max_jpeg_size = temp_width * temp_height;
5430        }
5431    }
5432    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5433    return max_jpeg_size;
5434}
5435
5436/*===========================================================================
5437 * FUNCTION   : getMaxRawSize
5438 *
5439 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5440 *
5441 * PARAMETERS :
5442 *
5443 * RETURN     : Largest supported Raw Dimension
5444 *==========================================================================*/
5445cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5446{
5447    int max_width = 0;
5448    cam_dimension_t maxRawSize;
5449
5450    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5451    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5452        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5453            max_width = gCamCapability[camera_id]->raw_dim[i].width;
5454            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5455        }
5456    }
5457    return maxRawSize;
5458}
5459
5460
5461/*===========================================================================
5462 * FUNCTION   : calcMaxJpegDim
5463 *
5464 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5465 *
5466 * PARAMETERS :
5467 *
5468 * RETURN     : max_jpeg_dim
5469 *==========================================================================*/
5470cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5471{
5472    cam_dimension_t max_jpeg_dim;
5473    cam_dimension_t curr_jpeg_dim;
5474    max_jpeg_dim.width = 0;
5475    max_jpeg_dim.height = 0;
5476    curr_jpeg_dim.width = 0;
5477    curr_jpeg_dim.height = 0;
5478    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5479        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5480        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5481        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5482            max_jpeg_dim.width * max_jpeg_dim.height ) {
5483            max_jpeg_dim.width = curr_jpeg_dim.width;
5484            max_jpeg_dim.height = curr_jpeg_dim.height;
5485        }
5486    }
5487    return max_jpeg_dim;
5488}
5489
5490/*===========================================================================
5491 * FUNCTION   : addStreamConfig
5492 *
5493 * DESCRIPTION: adds the stream configuration to the array
5494 *
5495 * PARAMETERS :
5496 * @available_stream_configs : pointer to stream configuration array
5497 * @scalar_format            : scalar format
5498 * @dim                      : configuration dimension
5499 * @config_type              : input or output configuration type
5500 *
5501 * RETURN     : NONE
5502 *==========================================================================*/
5503void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5504        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5505{
5506    available_stream_configs.add(scalar_format);
5507    available_stream_configs.add(dim.width);
5508    available_stream_configs.add(dim.height);
5509    available_stream_configs.add(config_type);
5510}
5511
5512
5513/*===========================================================================
5514 * FUNCTION   : initStaticMetadata
5515 *
5516 * DESCRIPTION: initialize the static metadata
5517 *
5518 * PARAMETERS :
5519 *   @cameraId  : camera Id
5520 *
5521 * RETURN     : int32_t type of status
5522 *              0  -- success
5523 *              non-zero failure code
5524 *==========================================================================*/
5525int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5526{
5527    int rc = 0;
5528    CameraMetadata staticInfo;
5529    size_t count = 0;
5530    bool limitedDevice = false;
5531    char prop[PROPERTY_VALUE_MAX];
5532
5533    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5534     * guaranteed, its advertised as limited device */
5535    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5536            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5537
5538    uint8_t supportedHwLvl = limitedDevice ?
5539            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5540            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5541
5542    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5543            &supportedHwLvl, 1);
5544
5545    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5546    /*HAL 3 only*/
5547    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5548                    &gCamCapability[cameraId]->min_focus_distance, 1);
5549
5550    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5551                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5552
5553    /*should be using focal lengths but sensor doesn't provide that info now*/
5554    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5555                      &gCamCapability[cameraId]->focal_length,
5556                      1);
5557
5558    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5559                      gCamCapability[cameraId]->apertures,
5560                      gCamCapability[cameraId]->apertures_count);
5561
5562    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5563                gCamCapability[cameraId]->filter_densities,
5564                gCamCapability[cameraId]->filter_densities_count);
5565
5566
5567    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5568                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5569                      gCamCapability[cameraId]->optical_stab_modes_count);
5570
5571    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5572            gCamCapability[cameraId]->lens_shading_map_size.height};
5573    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5574                      lens_shading_map_size,
5575                      sizeof(lens_shading_map_size)/sizeof(int32_t));
5576
5577    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5578            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5579
5580    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5581            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5582
5583    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5584            &gCamCapability[cameraId]->max_frame_duration, 1);
5585
5586    camera_metadata_rational baseGainFactor = {
5587            gCamCapability[cameraId]->base_gain_factor.numerator,
5588            gCamCapability[cameraId]->base_gain_factor.denominator};
5589    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5590                      &baseGainFactor, 1);
5591
5592    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5593                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5594
5595    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5596            gCamCapability[cameraId]->pixel_array_size.height};
5597    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5598                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5599
5600    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5601                                                gCamCapability[cameraId]->active_array_size.top,
5602                                                gCamCapability[cameraId]->active_array_size.width,
5603                                                gCamCapability[cameraId]->active_array_size.height};
5604    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5605                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5606
5607    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5608            &gCamCapability[cameraId]->white_level, 1);
5609
    // Per-channel sensor black levels (Bayer pattern order).
    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    // UNKNOWN: sensor timestamps are not guaranteed comparable to
    // CLOCK_BOOTTIME (per the Android metadata definition of this tag).
    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
            &timestampSource, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    // Sharpness map dimensions flattened to a (width, height) pair.
    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
            gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);

    // Full list of pixel formats this HAL advertises.  Reused further down
    // when building the stream-configuration and min-frame-duration tables,
    // so the names/contents must stay in sync with those loops.
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_RAW10,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);
5649
    // Legacy (HAL1-style) size/FPS tables, flattened into int32 pairs by
    // makeTable()/makeFPSTable().  `count` is a shared cursor reused for
    // each table below; each section re-derives it with MIN() clamping.
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
            count, MAX_SIZES_CNT, available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            available_processed_sizes, count * 2);

    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->raw_dim,
            count, MAX_SIZES_CNT, available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            available_raw_sizes, count * 2);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
            count, MAX_SIZES_CNT, available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, count * 2);

    // Exposure compensation step as a rational (numerator/denominator).
    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    // Only "video stabilization off" is advertised.
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    // Max metering regions as {AE, AWB, AF}.  AWB regions are never
    // supported; AF regions are dropped when only one focus mode exists
    // (fixed-focus module).
    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
        max3aRegions[2] = 0; /* AF not supported */
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);

    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "1");
    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
    CDBG("%s: Support face detection mode: %d",
            __func__, supportedFaceDetectMode);

    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    Vector<uint8_t> availableFaceDetectModes;
    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
    if (supportedFaceDetectMode == 1) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
    } else if (supportedFaceDetectMode == 2) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else if (supportedFaceDetectMode == 3) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else {
        // Face detection disabled via property: advertise OFF only and
        // report a zero face count.
        maxFaces = 0;
    }
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes.array(),
            availableFaceDetectModes.size());
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            (int32_t *)&maxFaces, 1);

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                           gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5734
    /*all sizes will be clubbed into this tag*/
    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    // filterJpegSizes() derives the advertisable JPEG (width, height) pairs
    // (including downscaled variants) from the processed-size table.
    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
            gCamCapability[cameraId]->max_downscale_factor);
    /*android.scaler.availableStreamConfigurations*/
    size_t max_stream_configs_size = count * scalar_formats_count * 4;
    Vector<int32_t> available_stream_configs;
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats: advertise every supported raw dimension.
            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG: advertise the filtered size list built above
            // (available_jpeg_sizes stores flattened width/height pairs).
            cam_dimension_t jpeg_size;
            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
                jpeg_size.width  = available_jpeg_sizes[i*2];
                jpeg_size.height = available_jpeg_sizes[i*2+1];
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        jpeg_size,
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // All processed formats: advertise the picture-size table and
            // track the largest size for the reprocess input config below.
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* Book keep largest */
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                 addStreamConfig(available_stream_configs, scalar_formats[j],
                         largest_picture_size,
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5803
5804    /* android.scaler.availableMinFrameDurations */
5805    int64_t available_min_durations[max_stream_configs_size];
5806    size_t idx = 0;
5807    for (size_t j = 0; j < scalar_formats_count; j++) {
5808        switch (scalar_formats[j]) {
5809        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5810        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5811        case HAL_PIXEL_FORMAT_RAW10:
5812            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5813                available_min_durations[idx] = scalar_formats[j];
5814                available_min_durations[idx+1] =
5815                    gCamCapability[cameraId]->raw_dim[i].width;
5816                available_min_durations[idx+2] =
5817                    gCamCapability[cameraId]->raw_dim[i].height;
5818                available_min_durations[idx+3] =
5819                    gCamCapability[cameraId]->raw_min_duration[i];
5820                idx+=4;
5821            }
5822            break;
5823        default:
5824            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5825                available_min_durations[idx] = scalar_formats[j];
5826                available_min_durations[idx+1] =
5827                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5828                available_min_durations[idx+2] =
5829                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5830                available_min_durations[idx+3] =
5831                    gCamCapability[cameraId]->picture_min_duration[i];
5832                idx+=4;
5833            }
5834            break;
5835        }
5836    }
5837    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
5838                      &available_min_durations[0], idx);
5839
    // High-frame-rate (HFR) support: translate each HFR table entry into
    // ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS rows.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            // fps stays 0 so the entry is skipped by the threshold below.
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
             * [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
       }
    }
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // NOTE(review): emptiness is checked via array() (non-NULL once any
    // element was added); size() > 0 would express the intent more directly.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }

    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate HAL effect enums to framework values; entries with no
    // framework mapping are silently dropped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);
5932
5933    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
5934    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
5935    size_t supported_scene_modes_cnt = 0;
5936    count = CAM_SCENE_MODE_MAX;
5937    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
5938    for (size_t i = 0; i < count; i++) {
5939        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
5940                CAM_SCENE_MODE_OFF) {
5941            int val = lookupFwkName(SCENE_MODES_MAP,
5942                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
5943                    gCamCapability[cameraId]->supported_scene_modes[i]);
5944            if (NAME_NOT_FOUND != val) {
5945                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
5946                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
5947                supported_scene_modes_cnt++;
5948            }
5949        }
5950    }
5951    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
5952                      avail_scene_modes,
5953                      supported_scene_modes_cnt);
5954
5955    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
5956    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
5957                      supported_scene_modes_cnt,
5958                      CAM_SCENE_MODE_MAX,
5959                      scene_mode_overrides,
5960                      supported_indexes,
5961                      cameraId);
5962
5963    if (supported_scene_modes_cnt == 0) {
5964        supported_scene_modes_cnt = 1;
5965        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
5966    }
5967
5968    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
5969            scene_mode_overrides, supported_scene_modes_cnt * 3);
5970
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);

    // Antibanding: translate HAL enums to framework values, dropping any
    // entry with no framework mapping.  `size`/`count` are shared cursors
    // reset at the start of each list below.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    // Chromatic aberration correction (CAC) modes.
    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
    size = 0;
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        // No CAC support reported by the backend: advertise OFF only.
        avail_abberation_modes[0] =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        size++;
    } else {
        for (size_t i = 0; i < count; i++) {
            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                    gCamCapability[cameraId]->aberration_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_abberation_modes[size] = (uint8_t)val;
                size++;
            } else {
                // Unknown mode aborts the scan; earlier entries still count.
                ALOGE("%s: Invalid CAC mode %d", __func__,
                        gCamCapability[cameraId]->aberration_modes[i]);
                break;
            }
        }

    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    // Flash firing levels are copied straight through (no translation).
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        // Flash-capable units additionally advertise the flash AE modes.
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);
6103
    // Max simultaneous output streams: {stalling (JPEG/RAW16), processed, raw}.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No notification LEDs: the tag is published with a zero-length payload.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Focus distance calibration: published only when the HAL value maps to
    // a framework enum.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Pipeline depth = in-flight requests plus the empty-pipeline and
    // frame-skip latencies.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);

    // Device capabilities.  CONSTRAINED_HIGH_SPEED_VIDEO is added only when
    // HFR configs were actually advertised above (same gate as the
    // HIGH_SPEED_VIDEO_CONFIGURATIONS tag); RAW only for non-YUV sensors.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
    //BURST_CAPTURE.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has
    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    int32_t max_latency = (limitedDevice) ?
            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));

    // Reference illuminants: published only when the HAL value maps to a
    // framework enum.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Static color/calibration transforms, copied straight from the
    // capability tables as rational arrays.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6294
6295    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6296       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6297       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6298       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6299       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6300       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6301       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6302       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6303       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6304       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6305       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6306       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6307       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6308       ANDROID_JPEG_GPS_COORDINATES,
6309       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6310       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6311       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6312       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6313       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6314       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6315       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6316       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6317       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6318       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6319       ANDROID_STATISTICS_FACE_DETECT_MODE,
6320       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6321       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6322       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6323       ANDROID_BLACK_LEVEL_LOCK };
6324
6325    size_t request_keys_cnt =
6326            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6327    Vector<int32_t> available_request_keys;
6328    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6329    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6330        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6331    }
6332
6333    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6334            available_request_keys.array(), available_request_keys.size());
6335
6336    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6337       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6338       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6339       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6340       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6341       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6342       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6343       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6344       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6345       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6346       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6347       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6348       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6349       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6350       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6351       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6352       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6353       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6354       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6355       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6356       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6357       ANDROID_STATISTICS_FACE_SCORES};
6358    size_t result_keys_cnt =
6359            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6360
6361    Vector<int32_t> available_result_keys;
6362    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6363    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6364        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6365    }
6366    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6367       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6368       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6369    }
6370    if (supportedFaceDetectMode == 1) {
6371        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6372        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6373    } else if ((supportedFaceDetectMode == 2) ||
6374            (supportedFaceDetectMode == 3)) {
6375        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6376        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6377    }
6378    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6379            available_result_keys.array(), available_result_keys.size());
6380
6381    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6382       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6383       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6384       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6385       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6386       ANDROID_SCALER_CROPPING_TYPE,
6387       ANDROID_SYNC_MAX_LATENCY,
6388       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6389       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6390       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6391       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6392       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6393       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6394       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6395       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6396       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6397       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6398       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6399       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6400       ANDROID_LENS_FACING,
6401       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6402       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6403       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6404       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6405       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6406       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6407       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6408       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6409       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6410       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6411       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6412       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6413       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6414       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6415       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6416       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6417       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6418       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6419       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6420       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6421       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6422       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6423       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6424       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6425       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6426       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6427       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6428       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6429       ANDROID_TONEMAP_MAX_CURVE_POINTS,
6430       ANDROID_CONTROL_AVAILABLE_MODES,
6431       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6432       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6433       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6434       ANDROID_SHADING_AVAILABLE_MODES,
6435       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6436    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6437                      available_characteristics_keys,
6438                      sizeof(available_characteristics_keys)/sizeof(int32_t));
6439
6440    /*available stall durations depend on the hw + sw and will be different for different devices */
6441    /*have to add for raw after implementation*/
6442    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6443    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6444
6445    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6446    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6447            MAX_SIZES_CNT);
6448    size_t available_stall_size = count * 4;
6449    int64_t available_stall_durations[available_stall_size];
6450    idx = 0;
6451    for (uint32_t j = 0; j < stall_formats_count; j++) {
6452       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6453          for (uint32_t i = 0; i < count; i++) {
6454             available_stall_durations[idx]   = stall_formats[j];
6455             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6456             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6457             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6458             idx+=4;
6459          }
6460       } else {
6461          for (uint32_t i = 0; i < raw_count; i++) {
6462             available_stall_durations[idx]   = stall_formats[j];
6463             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6464             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6465             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6466             idx+=4;
6467          }
6468       }
6469    }
6470    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6471                      available_stall_durations,
6472                      idx);
6473    //QCAMERA3_OPAQUE_RAW
6474    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6475    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6476    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6477    case LEGACY_RAW:
6478        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6479            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6480        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6481            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6482        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6483            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6484        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6485        break;
6486    case MIPI_RAW:
6487        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6488            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6489        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6490            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6491        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6492            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6493        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6494        break;
6495    default:
6496        ALOGE("%s: unknown opaque_raw_format %d", __func__,
6497                gCamCapability[cameraId]->opaque_raw_fmt);
6498        break;
6499    }
6500    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6501
6502    int32_t strides[3*raw_count];
6503    for (size_t i = 0; i < raw_count; i++) {
6504        cam_stream_buf_plane_info_t buf_planes;
6505        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6506        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6507        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6508            &gCamCapability[cameraId]->padding_info, &buf_planes);
6509        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6510    }
6511    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6512            3*raw_count);
6513
6514    gStaticMetadata[cameraId] = staticInfo.release();
6515    return rc;
6516}
6517
6518/*===========================================================================
6519 * FUNCTION   : makeTable
6520 *
6521 * DESCRIPTION: make a table of sizes
6522 *
6523 * PARAMETERS :
6524 *
6525 *
6526 *==========================================================================*/
6527void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6528        size_t max_size, int32_t *sizeTable)
6529{
6530    size_t j = 0;
6531    if (size > max_size) {
6532       size = max_size;
6533    }
6534    for (size_t i = 0; i < size; i++) {
6535        sizeTable[j] = dimTable[i].width;
6536        sizeTable[j+1] = dimTable[i].height;
6537        j+=2;
6538    }
6539}
6540
6541/*===========================================================================
6542 * FUNCTION   : makeFPSTable
6543 *
6544 * DESCRIPTION: make a table of fps ranges
6545 *
6546 * PARAMETERS :
6547 *
6548 *==========================================================================*/
6549void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6550        size_t max_size, int32_t *fpsRangesTable)
6551{
6552    size_t j = 0;
6553    if (size > max_size) {
6554       size = max_size;
6555    }
6556    for (size_t i = 0; i < size; i++) {
6557        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6558        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6559        j+=2;
6560    }
6561}
6562
6563/*===========================================================================
6564 * FUNCTION   : makeOverridesList
6565 *
6566 * DESCRIPTION: make a list of scene mode overrides
6567 *
6568 * PARAMETERS :
6569 *
6570 *
6571 *==========================================================================*/
6572void QCamera3HardwareInterface::makeOverridesList(
6573        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6574        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6575{
6576    /*daemon will give a list of overrides for all scene modes.
6577      However we should send the fwk only the overrides for the scene modes
6578      supported by the framework*/
6579    size_t j = 0;
6580    if (size > max_size) {
6581       size = max_size;
6582    }
6583    size_t focus_count = CAM_FOCUS_MODE_MAX;
6584    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6585            focus_count);
6586    for (size_t i = 0; i < size; i++) {
6587        bool supt = false;
6588        size_t index = supported_indexes[i];
6589        overridesList[j] = gCamCapability[camera_id]->flash_available ?
6590                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6591        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6592                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6593                overridesTable[index].awb_mode);
6594        if (NAME_NOT_FOUND != val) {
6595            overridesList[j+1] = (uint8_t)val;
6596        }
6597        uint8_t focus_override = overridesTable[index].af_mode;
6598        for (size_t k = 0; k < focus_count; k++) {
6599           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6600              supt = true;
6601              break;
6602           }
6603        }
6604        if (supt) {
6605            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6606                    focus_override);
6607            if (NAME_NOT_FOUND != val) {
6608                overridesList[j+2] = (uint8_t)val;
6609            }
6610        } else {
6611           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6612        }
6613        j+=3;
6614    }
6615}
6616
6617/*===========================================================================
6618 * FUNCTION   : filterJpegSizes
6619 *
6620 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6621 *              could be downscaled to
6622 *
6623 * PARAMETERS :
6624 *
6625 * RETURN     : length of jpegSizes array
6626 *==========================================================================*/
6627
6628size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6629        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6630        uint8_t downscale_factor)
6631{
6632    if (0 == downscale_factor) {
6633        downscale_factor = 1;
6634    }
6635
6636    int32_t min_width = active_array_size.width / downscale_factor;
6637    int32_t min_height = active_array_size.height / downscale_factor;
6638    size_t jpegSizesCnt = 0;
6639    if (processedSizesCnt > maxCount) {
6640        processedSizesCnt = maxCount;
6641    }
6642    for (size_t i = 0; i < processedSizesCnt; i+=2) {
6643        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6644            jpegSizes[jpegSizesCnt] = processedSizes[i];
6645            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6646            jpegSizesCnt += 2;
6647        }
6648    }
6649    return jpegSizesCnt;
6650}
6651
6652/*===========================================================================
6653 * FUNCTION   : getPreviewHalPixelFormat
6654 *
6655 * DESCRIPTION: convert the format to type recognized by framework
6656 *
6657 * PARAMETERS : format : the format from backend
6658 *
6659 ** RETURN    : format recognized by framework
6660 *
6661 *==========================================================================*/
6662int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6663{
6664    int32_t halPixelFormat;
6665
6666    switch (format) {
6667    case CAM_FORMAT_YUV_420_NV12:
6668        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6669        break;
6670    case CAM_FORMAT_YUV_420_NV21:
6671        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6672        break;
6673    case CAM_FORMAT_YUV_420_NV21_ADRENO:
6674        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6675        break;
6676    case CAM_FORMAT_YUV_420_YV12:
6677        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6678        break;
6679    case CAM_FORMAT_YUV_422_NV16:
6680    case CAM_FORMAT_YUV_422_NV61:
6681    default:
6682        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6683        break;
6684    }
6685    return halPixelFormat;
6686}
6687
6688/*===========================================================================
6689 * FUNCTION   : computeNoiseModelEntryS
6690 *
6691 * DESCRIPTION: function to map a given sensitivity to the S noise
6692 *              model parameters in the DNG noise model.
6693 *
6694 * PARAMETERS : sens : the sensor sensitivity
6695 *
 * RETURN     : S (sensor amplification) noise
6697 *
6698 *==========================================================================*/
6699double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6700    double s = gCamCapability[mCameraId]->gradient_S * sens +
6701            gCamCapability[mCameraId]->offset_S;
6702    return ((s < 0.0) ? 0.0 : s);
6703}
6704
6705/*===========================================================================
6706 * FUNCTION   : computeNoiseModelEntryO
6707 *
6708 * DESCRIPTION: function to map a given sensitivity to the O noise
6709 *              model parameters in the DNG noise model.
6710 *
6711 * PARAMETERS : sens : the sensor sensitivity
6712 *
 * RETURN     : O (sensor readout) noise
6714 *
6715 *==========================================================================*/
6716double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6717    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
6718    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
6719            1.0 : (1.0 * sens / max_analog_sens);
6720    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
6721            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
6722    return ((o < 0.0) ? 0.0 : o);
6723}
6724
6725/*===========================================================================
6726 * FUNCTION   : getSensorSensitivity
6727 *
6728 * DESCRIPTION: convert iso_mode to an integer value
6729 *
6730 * PARAMETERS : iso_mode : the iso_mode supported by sensor
6731 *
 * RETURN     : sensitivity supported by sensor
6733 *
6734 *==========================================================================*/
6735int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6736{
6737    int32_t sensitivity;
6738
6739    switch (iso_mode) {
6740    case CAM_ISO_MODE_100:
6741        sensitivity = 100;
6742        break;
6743    case CAM_ISO_MODE_200:
6744        sensitivity = 200;
6745        break;
6746    case CAM_ISO_MODE_400:
6747        sensitivity = 400;
6748        break;
6749    case CAM_ISO_MODE_800:
6750        sensitivity = 800;
6751        break;
6752    case CAM_ISO_MODE_1600:
6753        sensitivity = 1600;
6754        break;
6755    default:
6756        sensitivity = -1;
6757        break;
6758    }
6759    return sensitivity;
6760}
6761
6762/*===========================================================================
6763 * FUNCTION   : getCamInfo
6764 *
6765 * DESCRIPTION: query camera capabilities
6766 *
6767 * PARAMETERS :
6768 *   @cameraId  : camera Id
6769 *   @info      : camera info struct to be filled in with camera capabilities
6770 *
6771 * RETURN     : int type of status
6772 *              NO_ERROR  -- success
6773 *              none-zero failure code
6774 *==========================================================================*/
6775int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
6776        struct camera_info *info)
6777{
6778    ATRACE_CALL();
6779    int rc = 0;
6780
6781    pthread_mutex_lock(&gCamLock);
6782    if (NULL == gCamCapability[cameraId]) {
6783        rc = initCapabilities(cameraId);
6784        if (rc < 0) {
6785            pthread_mutex_unlock(&gCamLock);
6786            return rc;
6787        }
6788    }
6789
6790    if (NULL == gStaticMetadata[cameraId]) {
6791        rc = initStaticMetadata(cameraId);
6792        if (rc < 0) {
6793            pthread_mutex_unlock(&gCamLock);
6794            return rc;
6795        }
6796    }
6797
6798    switch(gCamCapability[cameraId]->position) {
6799    case CAM_POSITION_BACK:
6800        info->facing = CAMERA_FACING_BACK;
6801        break;
6802
6803    case CAM_POSITION_FRONT:
6804        info->facing = CAMERA_FACING_FRONT;
6805        break;
6806
6807    default:
6808        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
6809        rc = -1;
6810        break;
6811    }
6812
6813
6814    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
6815    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
6816    info->static_camera_characteristics = gStaticMetadata[cameraId];
6817
6818    //For now assume both cameras can operate independently.
6819    info->conflicting_devices = NULL;
6820    info->conflicting_devices_length = 0;
6821
6822    //resource cost is 100 * MIN(1.0, m/M),
6823    //where m is throughput requirement with maximum stream configuration
6824    //and M is CPP maximum throughput.
6825    float max_fps = 0.0;
6826    for (uint32_t i = 0;
6827            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
6828        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
6829            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
6830    }
6831    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
6832            gCamCapability[cameraId]->active_array_size.width *
6833            gCamCapability[cameraId]->active_array_size.height * max_fps /
6834            gCamCapability[cameraId]->max_pixel_bandwidth;
6835    info->resource_cost = 100 * MIN(1.0, ratio);
6836    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
6837            info->resource_cost);
6838
6839    pthread_mutex_unlock(&gCamLock);
6840    return rc;
6841}
6842
6843/*===========================================================================
6844 * FUNCTION   : translateCapabilityToMetadata
6845 *
6846 * DESCRIPTION: translate the capability into camera_metadata_t
6847 *
6848 * PARAMETERS : type of the request
6849 *
6850 *
6851 * RETURN     : success: camera_metadata_t*
6852 *              failure: NULL
6853 *
6854 *==========================================================================*/
6855camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6856{
6857    if (mDefaultMetadata[type] != NULL) {
6858        return mDefaultMetadata[type];
6859    }
6860    //first time we are handling this request
6861    //fill up the metadata structure using the wrapper class
6862    CameraMetadata settings;
6863    //translate from cam_capability_t to camera_metadata_tag_t
6864    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6865    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6866    int32_t defaultRequestID = 0;
6867    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6868
6869    /* OIS disable */
6870    char ois_prop[PROPERTY_VALUE_MAX];
6871    memset(ois_prop, 0, sizeof(ois_prop));
6872    property_get("persist.camera.ois.disable", ois_prop, "0");
6873    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6874
6875    /* Force video to use OIS */
6876    char videoOisProp[PROPERTY_VALUE_MAX];
6877    memset(videoOisProp, 0, sizeof(videoOisProp));
6878    property_get("persist.camera.ois.video", videoOisProp, "1");
6879    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6880
6881    uint8_t controlIntent = 0;
6882    uint8_t focusMode;
6883    uint8_t vsMode;
6884    uint8_t optStabMode;
6885    uint8_t cacMode;
6886    uint8_t edge_mode;
6887    uint8_t noise_red_mode;
6888    uint8_t tonemap_mode;
6889    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6890    switch (type) {
6891      case CAMERA3_TEMPLATE_PREVIEW:
6892        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
6893        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6894        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6895        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6896        edge_mode = ANDROID_EDGE_MODE_FAST;
6897        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6898        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6899        break;
6900      case CAMERA3_TEMPLATE_STILL_CAPTURE:
6901        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
6902        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6903        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6904        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
6905        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
6906        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
6907        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
6908        break;
6909      case CAMERA3_TEMPLATE_VIDEO_RECORD:
6910        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
6911        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6912        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6913        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6914        edge_mode = ANDROID_EDGE_MODE_FAST;
6915        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6916        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6917        if (forceVideoOis)
6918            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6919        break;
6920      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
6921        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
6922        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6923        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6924        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6925        edge_mode = ANDROID_EDGE_MODE_FAST;
6926        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6927        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6928        if (forceVideoOis)
6929            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6930        break;
6931      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
6932        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
6933        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6934        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6935        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6936        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
6937        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
6938        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6939        break;
6940      case CAMERA3_TEMPLATE_MANUAL:
6941        edge_mode = ANDROID_EDGE_MODE_FAST;
6942        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6943        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6944        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6945        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
6946        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6947        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6948        break;
6949      default:
6950        edge_mode = ANDROID_EDGE_MODE_FAST;
6951        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6952        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6953        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6954        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
6955        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6956        break;
6957    }
6958    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
6959    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
6960    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
6961    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
6962        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6963    }
6964    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
6965
6966    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6967            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
6968        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6969    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6970            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
6971            || ois_disable)
6972        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6973    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
6974
6975    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6976            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
6977
6978    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
6979    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
6980
6981    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
6982    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
6983
6984    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
6985    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
6986
6987    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
6988    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
6989
6990    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
6991    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
6992
6993    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
6994    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
6995
6996    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
6997    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
6998
6999    /*flash*/
7000    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7001    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7002
7003    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7004    settings.update(ANDROID_FLASH_FIRING_POWER,
7005            &flashFiringLevel, 1);
7006
7007    /* lens */
7008    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7009    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7010
7011    if (gCamCapability[mCameraId]->filter_densities_count) {
7012        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7013        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7014                        gCamCapability[mCameraId]->filter_densities_count);
7015    }
7016
7017    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7018    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7019
7020    float default_focus_distance = 0;
7021    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7022
7023    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7024    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7025
7026    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7027    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7028
7029    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7030    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7031
7032    /* face detection (default to OFF) */
7033    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7034    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7035
7036    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7037    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7038
7039    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7040    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7041
7042    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7043    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7044
7045    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7046    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7047
7048    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7049    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7050
7051    /* Exposure time(Update the Min Exposure Time)*/
7052    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7053    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7054
7055    /* frame duration */
7056    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7057    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7058
7059    /* sensitivity */
7060    static const int32_t default_sensitivity = 100;
7061    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7062
7063    /*edge mode*/
7064    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7065
7066    /*noise reduction mode*/
7067    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7068
7069    /*color correction mode*/
7070    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7071    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7072
7073    /*transform matrix mode*/
7074    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7075
7076    int32_t scaler_crop_region[4];
7077    scaler_crop_region[0] = 0;
7078    scaler_crop_region[1] = 0;
7079    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7080    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7081    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7082
7083    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7084    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7085
7086    /*focus distance*/
7087    float focus_distance = 0.0;
7088    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7089
7090    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7091    float max_range = 0.0;
7092    float max_fixed_fps = 0.0;
7093    int32_t fps_range[2] = {0, 0};
7094    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7095            i++) {
7096        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7097            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7098        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7099                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7100                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7101            if (range > max_range) {
7102                fps_range[0] =
7103                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7104                fps_range[1] =
7105                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7106                max_range = range;
7107            }
7108        } else {
7109            if (range < 0.01 && max_fixed_fps <
7110                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7111                fps_range[0] =
7112                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7113                fps_range[1] =
7114                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7115                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7116            }
7117        }
7118    }
7119    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7120
7121    /*precapture trigger*/
7122    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7123    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7124
7125    /*af trigger*/
7126    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7127    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7128
7129    /* ae & af regions */
7130    int32_t active_region[] = {
7131            gCamCapability[mCameraId]->active_array_size.left,
7132            gCamCapability[mCameraId]->active_array_size.top,
7133            gCamCapability[mCameraId]->active_array_size.left +
7134                    gCamCapability[mCameraId]->active_array_size.width,
7135            gCamCapability[mCameraId]->active_array_size.top +
7136                    gCamCapability[mCameraId]->active_array_size.height,
7137            0};
7138    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7139            sizeof(active_region) / sizeof(active_region[0]));
7140    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7141            sizeof(active_region) / sizeof(active_region[0]));
7142
7143    /* black level lock */
7144    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7145    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7146
7147    /* lens shading map mode */
7148    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7149    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7150        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7151    }
7152    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7153
7154    //special defaults for manual template
7155    if (type == CAMERA3_TEMPLATE_MANUAL) {
7156        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7157        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7158
7159        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7160        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7161
7162        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7163        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7164
7165        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7166        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7167
7168        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7169        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7170
7171        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7172        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7173    }
7174
7175
7176    /* TNR
7177     * We'll use this location to determine which modes TNR will be set.
7178     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7179     * This is not to be confused with linking on a per stream basis that decision
7180     * is still on per-session basis and will be handled as part of config stream
7181     */
7182    uint8_t tnr_enable = 0;
7183
7184    if (m_bTnrPreview || m_bTnrVideo) {
7185
7186        switch (type) {
7187            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7188            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7189                    tnr_enable = 1;
7190                    break;
7191
7192            default:
7193                    tnr_enable = 0;
7194                    break;
7195        }
7196
7197        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7198        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7199        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7200
7201        CDBG("%s: TNR:%d with process plate %d for template:%d",
7202                            __func__, tnr_enable, tnr_process_type, type);
7203    }
7204
7205    /* CDS default */
7206    char prop[PROPERTY_VALUE_MAX];
7207    memset(prop, 0, sizeof(prop));
7208    property_get("persist.camera.CDS", prop, "Auto");
7209    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7210    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7211    if (CAM_CDS_MODE_MAX == cds_mode) {
7212        cds_mode = CAM_CDS_MODE_AUTO;
7213    }
7214    m_CdsPreference = cds_mode;
7215
7216    /* Disabling CDS in templates which have TNR enabled*/
7217    if (tnr_enable)
7218        cds_mode = CAM_CDS_MODE_OFF;
7219
7220    int32_t mode = cds_mode;
7221    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7222    mDefaultMetadata[type] = settings.release();
7223
7224    return mDefaultMetadata[type];
7225}
7226
7227/*===========================================================================
7228 * FUNCTION   : setFrameParameters
7229 *
7230 * DESCRIPTION: set parameters per frame as requested in the metadata from
7231 *              framework
7232 *
7233 * PARAMETERS :
7234 *   @request   : request that needs to be serviced
7235 *   @streamID : Stream ID of all the requested streams
7236 *   @blob_request: Whether this request is a blob request or not
7237 *
7238 * RETURN     : success: NO_ERROR
7239 *              failure:
7240 *==========================================================================*/
7241int QCamera3HardwareInterface::setFrameParameters(
7242                    camera3_capture_request_t *request,
7243                    cam_stream_ID_t streamID,
7244                    int blob_request,
7245                    uint32_t snapshotStreamId)
7246{
7247    /*translate from camera_metadata_t type to parm_type_t*/
7248    int rc = 0;
7249    int32_t hal_version = CAM_HAL_V3;
7250
7251    clear_metadata_buffer(mParameters);
7252    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7253        ALOGE("%s: Failed to set hal version in the parameters", __func__);
7254        return BAD_VALUE;
7255    }
7256
7257    /*we need to update the frame number in the parameters*/
7258    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7259            request->frame_number)) {
7260        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7261        return BAD_VALUE;
7262    }
7263
7264    /* Update stream id of all the requested buffers */
7265    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7266        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7267        return BAD_VALUE;
7268    }
7269
7270    if (mUpdateDebugLevel) {
7271        uint32_t dummyDebugLevel = 0;
7272        /* The value of dummyDebugLevel is irrelavent. On
7273         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7274        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7275                dummyDebugLevel)) {
7276            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7277            return BAD_VALUE;
7278        }
7279        mUpdateDebugLevel = false;
7280    }
7281
7282    if(request->settings != NULL){
7283        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7284        if (blob_request)
7285            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7286    }
7287
7288    return rc;
7289}
7290
7291/*===========================================================================
7292 * FUNCTION   : setReprocParameters
7293 *
7294 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7295 *              return it.
7296 *
7297 * PARAMETERS :
7298 *   @request   : request that needs to be serviced
7299 *
7300 * RETURN     : success: NO_ERROR
7301 *              failure:
7302 *==========================================================================*/
7303int32_t QCamera3HardwareInterface::setReprocParameters(
7304        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7305        uint32_t snapshotStreamId)
7306{
7307    /*translate from camera_metadata_t type to parm_type_t*/
7308    int rc = 0;
7309
7310    if (NULL == request->settings){
7311        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7312        return BAD_VALUE;
7313    }
7314
7315    if (NULL == reprocParam) {
7316        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7317        return BAD_VALUE;
7318    }
7319    clear_metadata_buffer(reprocParam);
7320
7321    /*we need to update the frame number in the parameters*/
7322    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7323            request->frame_number)) {
7324        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7325        return BAD_VALUE;
7326    }
7327
7328    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7329    if (rc < 0) {
7330        ALOGE("%s: Failed to translate reproc request", __func__);
7331        return rc;
7332    }
7333
7334    CameraMetadata frame_settings;
7335    frame_settings = request->settings;
7336    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7337            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7338        int32_t *crop_count =
7339                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7340        int32_t *crop_data =
7341                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7342        int32_t *roi_map =
7343                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7344        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7345            cam_crop_data_t crop_meta;
7346            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7347            crop_meta.num_of_streams = 1;
7348            crop_meta.crop_info[0].crop.left   = crop_data[0];
7349            crop_meta.crop_info[0].crop.top    = crop_data[1];
7350            crop_meta.crop_info[0].crop.width  = crop_data[2];
7351            crop_meta.crop_info[0].crop.height = crop_data[3];
7352
7353            crop_meta.crop_info[0].roi_map.left =
7354                    roi_map[0];
7355            crop_meta.crop_info[0].roi_map.top =
7356                    roi_map[1];
7357            crop_meta.crop_info[0].roi_map.width =
7358                    roi_map[2];
7359            crop_meta.crop_info[0].roi_map.height =
7360                    roi_map[3];
7361
7362            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7363                rc = BAD_VALUE;
7364            }
7365            CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7366                    __func__,
7367                    request->input_buffer->stream,
7368                    crop_meta.crop_info[0].crop.left,
7369                    crop_meta.crop_info[0].crop.top,
7370                    crop_meta.crop_info[0].crop.width,
7371                    crop_meta.crop_info[0].crop.height);
7372            CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7373                    __func__,
7374                    request->input_buffer->stream,
7375                    crop_meta.crop_info[0].roi_map.left,
7376                    crop_meta.crop_info[0].roi_map.top,
7377                    crop_meta.crop_info[0].roi_map.width,
7378                    crop_meta.crop_info[0].roi_map.height);
7379            } else {
7380                ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7381            }
7382    } else {
7383        ALOGE("%s: No crop data from matching output stream", __func__);
7384    }
7385
7386    /* These settings are not needed for regular requests so handle them specially for
7387       reprocess requests; information needed for EXIF tags */
7388    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7389        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7390                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7391        if (NAME_NOT_FOUND != val) {
7392            uint32_t flashMode = (uint32_t)val;
7393            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7394                rc = BAD_VALUE;
7395            }
7396        } else {
7397            ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7398                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7399        }
7400    } else {
7401        CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7402    }
7403
7404    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7405        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7406        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7407            rc = BAD_VALUE;
7408        }
7409    } else {
7410        CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7411    }
7412
7413    return rc;
7414}
7415
7416/*===========================================================================
7417 * FUNCTION   : saveRequestSettings
7418 *
7419 * DESCRIPTION: Add any settings that might have changed to the request settings
7420 *              and save the settings to be applied on the frame
7421 *
7422 * PARAMETERS :
7423 *   @jpegMetadata : the extracted and/or modified jpeg metadata
7424 *   @request      : request with initial settings
7425 *
7426 * RETURN     :
7427 * camera_metadata_t* : pointer to the saved request settings
7428 *==========================================================================*/
7429camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7430        const CameraMetadata &jpegMetadata,
7431        camera3_capture_request_t *request)
7432{
7433    camera_metadata_t *resultMetadata;
7434    CameraMetadata camMetadata;
7435    camMetadata = request->settings;
7436
7437    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7438        int32_t thumbnail_size[2];
7439        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7440        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7441        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7442                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7443    }
7444
7445    resultMetadata = camMetadata.release();
7446    return resultMetadata;
7447}
7448
7449/*===========================================================================
7450 * FUNCTION   : setHalFpsRange
7451 *
7452 * DESCRIPTION: set FPS range parameter
7453 *
7454 *
7455 * PARAMETERS :
7456 *   @settings    : Metadata from framework
7457 *   @hal_metadata: Metadata buffer
7458 *
7459 *
7460 * RETURN     : success: NO_ERROR
7461 *              failure:
7462 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    /* NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
     * settings — find() on a missing tag yields a NULL data pointer. The
     * caller is expected to guarantee the tag exists; confirm at call sites. */
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    /* Start with video fps tracking the AE target range; overridden below
     * for constrained high-speed sessions. */
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record)           |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    /* Batch size is recomputed from scratch on every call; stays 0 outside
     * constrained high-speed mode. */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        /* Pin the whole range to the max fps (fixed sensor rate per the
         * table above). */
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                /* Batch so that preview runs at PREVIEW_FPS_FOR_HFR, capped
                 * at the maximum the pipeline supports. */
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7556
7557/*===========================================================================
7558 * FUNCTION   : translateToHalMetadata
7559 *
7560 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7561 *
7562 *
7563 * PARAMETERS :
7564 *   @request  : request sent from framework
7565 *
7566 *
7567 * RETURN     : success: NO_ERROR
7568 *              failure:
7569 *==========================================================================*/
7570int QCamera3HardwareInterface::translateToHalMetadata
7571                                  (const camera3_capture_request_t *request,
7572                                   metadata_buffer_t *hal_metadata,
7573                                   uint32_t snapshotStreamId)
7574{
7575    int rc = 0;
7576    CameraMetadata frame_settings;
7577    frame_settings = request->settings;
7578
7579    /* Do not change the order of the following list unless you know what you are
7580     * doing.
7581     * The order is laid out in such a way that parameters in the front of the table
7582     * may be used to override the parameters later in the table. Examples are:
7583     * 1. META_MODE should precede AEC/AWB/AF MODE
7584     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7585     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
7586     * 4. Any mode should precede it's corresponding settings
7587     */
7588    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7589        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7590        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7591            rc = BAD_VALUE;
7592        }
7593        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7594        if (rc != NO_ERROR) {
7595            ALOGE("%s: extractSceneMode failed", __func__);
7596        }
7597    }
7598
7599    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7600        uint8_t fwk_aeMode =
7601            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7602        uint8_t aeMode;
7603        int32_t redeye;
7604
7605        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
7606            aeMode = CAM_AE_MODE_OFF;
7607        } else {
7608            aeMode = CAM_AE_MODE_ON;
7609        }
7610        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
7611            redeye = 1;
7612        } else {
7613            redeye = 0;
7614        }
7615
7616        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7617                fwk_aeMode);
7618        if (NAME_NOT_FOUND != val) {
7619            int32_t flashMode = (int32_t)val;
7620            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
7621        }
7622
7623        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
7624        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
7625            rc = BAD_VALUE;
7626        }
7627    }
7628
7629    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
7630        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
7631        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7632                fwk_whiteLevel);
7633        if (NAME_NOT_FOUND != val) {
7634            uint8_t whiteLevel = (uint8_t)val;
7635            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
7636                rc = BAD_VALUE;
7637            }
7638        }
7639    }
7640
7641    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
7642        uint8_t fwk_cacMode =
7643                frame_settings.find(
7644                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
7645        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7646                fwk_cacMode);
7647        if (NAME_NOT_FOUND != val) {
7648            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
7649            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
7650                rc = BAD_VALUE;
7651            }
7652        } else {
7653            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
7654        }
7655    }
7656
7657    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
7658        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
7659        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7660                fwk_focusMode);
7661        if (NAME_NOT_FOUND != val) {
7662            uint8_t focusMode = (uint8_t)val;
7663            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
7664                rc = BAD_VALUE;
7665            }
7666        }
7667    }
7668
7669    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
7670        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
7671        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
7672                focalDistance)) {
7673            rc = BAD_VALUE;
7674        }
7675    }
7676
7677    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
7678        uint8_t fwk_antibandingMode =
7679                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
7680        int val = lookupHalName(ANTIBANDING_MODES_MAP,
7681                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
7682        if (NAME_NOT_FOUND != val) {
7683            uint32_t hal_antibandingMode = (uint32_t)val;
7684            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
7685                    hal_antibandingMode)) {
7686                rc = BAD_VALUE;
7687            }
7688        }
7689    }
7690
7691    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
7692        int32_t expCompensation = frame_settings.find(
7693                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
7694        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
7695            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
7696        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7697            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7698        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7699                expCompensation)) {
7700            rc = BAD_VALUE;
7701        }
7702    }
7703
7704    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7705        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7706        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7707            rc = BAD_VALUE;
7708        }
7709    }
7710    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7711        rc = setHalFpsRange(frame_settings, hal_metadata);
7712        if (rc != NO_ERROR) {
7713            ALOGE("%s: setHalFpsRange failed", __func__);
7714        }
7715    }
7716
7717    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7718        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7719        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7720            rc = BAD_VALUE;
7721        }
7722    }
7723
7724    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7725        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7726        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7727                fwk_effectMode);
7728        if (NAME_NOT_FOUND != val) {
7729            uint8_t effectMode = (uint8_t)val;
7730            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
7731                rc = BAD_VALUE;
7732            }
7733        }
7734    }
7735
7736    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7737        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7738        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
7739                colorCorrectMode)) {
7740            rc = BAD_VALUE;
7741        }
7742    }
7743
7744    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7745        cam_color_correct_gains_t colorCorrectGains;
7746        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7747            colorCorrectGains.gains[i] =
7748                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7749        }
7750        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
7751                colorCorrectGains)) {
7752            rc = BAD_VALUE;
7753        }
7754    }
7755
7756    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7757        cam_color_correct_matrix_t colorCorrectTransform;
7758        cam_rational_type_t transform_elem;
7759        size_t num = 0;
7760        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7761           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7762              transform_elem.numerator =
7763                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7764              transform_elem.denominator =
7765                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7766              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7767              num++;
7768           }
7769        }
7770        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7771                colorCorrectTransform)) {
7772            rc = BAD_VALUE;
7773        }
7774    }
7775
7776    cam_trigger_t aecTrigger;
7777    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7778    aecTrigger.trigger_id = -1;
7779    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7780        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7781        aecTrigger.trigger =
7782            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7783        aecTrigger.trigger_id =
7784            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7785        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7786                aecTrigger)) {
7787            rc = BAD_VALUE;
7788        }
7789        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7790                aecTrigger.trigger, aecTrigger.trigger_id);
7791    }
7792
7793    /*af_trigger must come with a trigger id*/
7794    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7795        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7796        cam_trigger_t af_trigger;
7797        af_trigger.trigger =
7798            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7799        af_trigger.trigger_id =
7800            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7801        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7802            rc = BAD_VALUE;
7803        }
7804        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7805                af_trigger.trigger, af_trigger.trigger_id);
7806    }
7807
7808    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7809        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7810        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
7811            rc = BAD_VALUE;
7812        }
7813    }
7814    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7815        cam_edge_application_t edge_application;
7816        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7817        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7818            edge_application.sharpness = 0;
7819        } else {
7820            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7821        }
7822        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7823            rc = BAD_VALUE;
7824        }
7825    }
7826
7827    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7828        int32_t respectFlashMode = 1;
7829        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7830            uint8_t fwk_aeMode =
7831                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7832            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7833                respectFlashMode = 0;
7834                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7835                    __func__);
7836            }
7837        }
7838        if (respectFlashMode) {
7839            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7840                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7841            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7842            // To check: CAM_INTF_META_FLASH_MODE usage
7843            if (NAME_NOT_FOUND != val) {
7844                uint8_t flashMode = (uint8_t)val;
7845                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
7846                    rc = BAD_VALUE;
7847                }
7848            }
7849        }
7850    }
7851
7852    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7853        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7854        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
7855            rc = BAD_VALUE;
7856        }
7857    }
7858
7859    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7860        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
7861        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
7862                flashFiringTime)) {
7863            rc = BAD_VALUE;
7864        }
7865    }
7866
7867    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
7868        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
7869        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
7870                hotPixelMode)) {
7871            rc = BAD_VALUE;
7872        }
7873    }
7874
7875    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
7876        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
7877        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
7878                lensAperture)) {
7879            rc = BAD_VALUE;
7880        }
7881    }
7882
7883    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
7884        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
7885        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
7886                filterDensity)) {
7887            rc = BAD_VALUE;
7888        }
7889    }
7890
7891    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
7892        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
7893        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH, focalLength)) {
7894            rc = BAD_VALUE;
7895        }
7896    }
7897
7898    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
7899        uint8_t optStabMode =
7900                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
7901        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE, optStabMode)) {
7902            rc = BAD_VALUE;
7903        }
7904    }
7905
7906    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
7907        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
7908        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
7909                noiseRedMode)) {
7910            rc = BAD_VALUE;
7911        }
7912    }
7913
7914    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
7915        float reprocessEffectiveExposureFactor =
7916            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
7917        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
7918                reprocessEffectiveExposureFactor)) {
7919            rc = BAD_VALUE;
7920        }
7921    }
7922
7923    cam_crop_region_t scalerCropRegion;
7924    bool scalerCropSet = false;
7925    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
7926        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
7927        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
7928        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
7929        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
7930
7931        // Map coordinate system from active array to sensor output.
7932        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
7933                scalerCropRegion.width, scalerCropRegion.height);
7934
7935        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
7936                scalerCropRegion)) {
7937            rc = BAD_VALUE;
7938        }
7939        scalerCropSet = true;
7940    }
7941
7942    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
7943        int64_t sensorExpTime =
7944                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
7945        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
7946        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
7947                sensorExpTime)) {
7948            rc = BAD_VALUE;
7949        }
7950    }
7951
7952    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
7953        int64_t sensorFrameDuration =
7954                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
7955        int64_t minFrameDuration = getMinFrameDuration(request);
7956        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
7957        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
7958            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
7959        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
7960        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
7961                sensorFrameDuration)) {
7962            rc = BAD_VALUE;
7963        }
7964    }
7965
7966    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
7967        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
7968        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
7969                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
7970        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
7971                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
7972        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
7973        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
7974                sensorSensitivity)) {
7975            rc = BAD_VALUE;
7976        }
7977    }
7978
7979    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
7980        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
7981        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
7982            rc = BAD_VALUE;
7983        }
7984    }
7985
7986    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
7987        uint8_t fwk_facedetectMode =
7988                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
7989
7990        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7991                fwk_facedetectMode);
7992
7993        if (NAME_NOT_FOUND != val) {
7994            uint8_t facedetectMode = (uint8_t)val;
7995            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
7996                    facedetectMode)) {
7997                rc = BAD_VALUE;
7998            }
7999        }
8000    }
8001
8002    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8003        uint8_t histogramMode =
8004                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8005        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8006                histogramMode)) {
8007            rc = BAD_VALUE;
8008        }
8009    }
8010
8011    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8012        uint8_t sharpnessMapMode =
8013                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8014        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8015                sharpnessMapMode)) {
8016            rc = BAD_VALUE;
8017        }
8018    }
8019
8020    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8021        uint8_t tonemapMode =
8022                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8023        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8024            rc = BAD_VALUE;
8025        }
8026    }
8027    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8028    /*All tonemap channels will have the same number of points*/
8029    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8030        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8031        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8032        cam_rgb_tonemap_curves tonemapCurves;
8033        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8034        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8035            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8036                    __func__, tonemapCurves.tonemap_points_cnt,
8037                    CAM_MAX_TONEMAP_CURVE_SIZE);
8038            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8039        }
8040
8041        /* ch0 = G*/
8042        size_t point = 0;
8043        cam_tonemap_curve_t tonemapCurveGreen;
8044        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8045            for (size_t j = 0; j < 2; j++) {
8046               tonemapCurveGreen.tonemap_points[i][j] =
8047                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8048               point++;
8049            }
8050        }
8051        tonemapCurves.curves[0] = tonemapCurveGreen;
8052
8053        /* ch 1 = B */
8054        point = 0;
8055        cam_tonemap_curve_t tonemapCurveBlue;
8056        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8057            for (size_t j = 0; j < 2; j++) {
8058               tonemapCurveBlue.tonemap_points[i][j] =
8059                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8060               point++;
8061            }
8062        }
8063        tonemapCurves.curves[1] = tonemapCurveBlue;
8064
8065        /* ch 2 = R */
8066        point = 0;
8067        cam_tonemap_curve_t tonemapCurveRed;
8068        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8069            for (size_t j = 0; j < 2; j++) {
8070               tonemapCurveRed.tonemap_points[i][j] =
8071                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8072               point++;
8073            }
8074        }
8075        tonemapCurves.curves[2] = tonemapCurveRed;
8076
8077        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8078                tonemapCurves)) {
8079            rc = BAD_VALUE;
8080        }
8081    }
8082
8083    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8084        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8085        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8086                captureIntent)) {
8087            rc = BAD_VALUE;
8088        }
8089    }
8090
8091    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8092        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8093        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8094                blackLevelLock)) {
8095            rc = BAD_VALUE;
8096        }
8097    }
8098
8099    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8100        uint8_t lensShadingMapMode =
8101                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8102        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8103                lensShadingMapMode)) {
8104            rc = BAD_VALUE;
8105        }
8106    }
8107
8108    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8109        cam_area_t roi;
8110        bool reset = true;
8111        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8112
8113        // Map coordinate system from active array to sensor output.
8114        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8115                roi.rect.height);
8116
8117        if (scalerCropSet) {
8118            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8119        }
8120        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8121            rc = BAD_VALUE;
8122        }
8123    }
8124
8125    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8126        cam_area_t roi;
8127        bool reset = true;
8128        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8129
8130        // Map coordinate system from active array to sensor output.
8131        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8132                roi.rect.height);
8133
8134        if (scalerCropSet) {
8135            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8136        }
8137        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8138            rc = BAD_VALUE;
8139        }
8140    }
8141
8142    if (m_bIs4KVideo) {
8143        /* Override needed for Video template in case of 4K video */
8144        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8145                CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8146            rc = BAD_VALUE;
8147        }
8148    } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8149            frame_settings.exists(QCAMERA3_CDS_MODE)) {
8150        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8151        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8152            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8153        } else {
8154            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8155                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8156                rc = BAD_VALUE;
8157            }
8158        }
8159    }
8160
8161    // TNR
8162    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8163        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8164        uint8_t b_TnrRequested = 0;
8165        cam_denoise_param_t tnr;
8166        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8167        tnr.process_plates =
8168            (cam_denoise_process_type_t)frame_settings.find(
8169            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8170        b_TnrRequested = tnr.denoise_enable;
8171        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8172            rc = BAD_VALUE;
8173        }
8174    }
8175
8176    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8177        int32_t fwk_testPatternMode =
8178                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8179        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8180                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8181
8182        if (NAME_NOT_FOUND != testPatternMode) {
8183            cam_test_pattern_data_t testPatternData;
8184            memset(&testPatternData, 0, sizeof(testPatternData));
8185            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8186            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8187                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8188                int32_t *fwk_testPatternData =
8189                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8190                testPatternData.r = fwk_testPatternData[0];
8191                testPatternData.b = fwk_testPatternData[3];
8192                switch (gCamCapability[mCameraId]->color_arrangement) {
8193                    case CAM_FILTER_ARRANGEMENT_RGGB:
8194                    case CAM_FILTER_ARRANGEMENT_GRBG:
8195                        testPatternData.gr = fwk_testPatternData[1];
8196                        testPatternData.gb = fwk_testPatternData[2];
8197                        break;
8198                    case CAM_FILTER_ARRANGEMENT_GBRG:
8199                    case CAM_FILTER_ARRANGEMENT_BGGR:
8200                        testPatternData.gr = fwk_testPatternData[2];
8201                        testPatternData.gb = fwk_testPatternData[1];
8202                        break;
8203                    default:
8204                        ALOGE("%s: color arrangement %d is not supported", __func__,
8205                                gCamCapability[mCameraId]->color_arrangement);
8206                        break;
8207                }
8208            }
8209            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8210                    testPatternData)) {
8211                rc = BAD_VALUE;
8212            }
8213        } else {
8214            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8215                    fwk_testPatternMode);
8216        }
8217    }
8218
8219    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8220        size_t count = 0;
8221        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8222        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8223                gps_coords.data.d, gps_coords.count, count);
8224        if (gps_coords.count != count) {
8225            rc = BAD_VALUE;
8226        }
8227    }
8228
8229    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8230        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8231        size_t count = 0;
8232        const char *gps_methods_src = (const char *)
8233                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8234        memset(gps_methods, '\0', sizeof(gps_methods));
8235        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8236        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8237                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8238        if (GPS_PROCESSING_METHOD_SIZE != count) {
8239            rc = BAD_VALUE;
8240        }
8241    }
8242
8243    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8244        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8245        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8246                gps_timestamp)) {
8247            rc = BAD_VALUE;
8248        }
8249    }
8250
8251    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8252        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8253        cam_rotation_info_t rotation_info;
8254        if (orientation == 0) {
8255           rotation_info.rotation = ROTATE_0;
8256        } else if (orientation == 90) {
8257           rotation_info.rotation = ROTATE_90;
8258        } else if (orientation == 180) {
8259           rotation_info.rotation = ROTATE_180;
8260        } else if (orientation == 270) {
8261           rotation_info.rotation = ROTATE_270;
8262        }
8263        rotation_info.streamId = snapshotStreamId;
8264        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8265        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8266            rc = BAD_VALUE;
8267        }
8268    }
8269
8270    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8271        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8272        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8273            rc = BAD_VALUE;
8274        }
8275    }
8276
8277    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8278        uint32_t thumb_quality = (uint32_t)
8279                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8280        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8281                thumb_quality)) {
8282            rc = BAD_VALUE;
8283        }
8284    }
8285
8286    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8287        cam_dimension_t dim;
8288        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8289        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8290        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8291            rc = BAD_VALUE;
8292        }
8293    }
8294
8295    // Internal metadata
8296    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8297        size_t count = 0;
8298        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8299        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8300                privatedata.data.i32, privatedata.count, count);
8301        if (privatedata.count != count) {
8302            rc = BAD_VALUE;
8303        }
8304    }
8305
8306    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8307        uint8_t* use_av_timer =
8308                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8309        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8310            rc = BAD_VALUE;
8311        }
8312    }
8313
8314    // EV step
8315    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8316            gCamCapability[mCameraId]->exp_compensation_step)) {
8317        rc = BAD_VALUE;
8318    }
8319
8320    // CDS info
8321    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8322        cam_cds_data_t *cdsData = (cam_cds_data_t *)
8323                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8324
8325        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8326                CAM_INTF_META_CDS_DATA, *cdsData)) {
8327            rc = BAD_VALUE;
8328        }
8329    }
8330
8331    return rc;
8332}
8333
8334/*===========================================================================
8335 * FUNCTION   : captureResultCb
8336 *
8337 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8338 *
8339 * PARAMETERS :
8340 *   @frame  : frame information from mm-camera-interface
8341 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8342 *   @userdata: userdata
8343 *
8344 * RETURN     : NONE
8345 *==========================================================================*/
8346void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8347                camera3_stream_buffer_t *buffer,
8348                uint32_t frame_number, void *userdata)
8349{
8350    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8351    if (hw == NULL) {
8352        ALOGE("%s: Invalid hw %p", __func__, hw);
8353        return;
8354    }
8355
8356    hw->captureResultCb(metadata, buffer, frame_number);
8357    return;
8358}
8359
8360
8361/*===========================================================================
8362 * FUNCTION   : initialize
8363 *
8364 * DESCRIPTION: Pass framework callback pointers to HAL
8365 *
8366 * PARAMETERS :
8367 *
8368 *
8369 * RETURN     : Success : 0
8370 *              Failure: -ENODEV
8371 *==========================================================================*/
8372
8373int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8374                                  const camera3_callback_ops_t *callback_ops)
8375{
8376    CDBG("%s: E", __func__);
8377    QCamera3HardwareInterface *hw =
8378        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8379    if (!hw) {
8380        ALOGE("%s: NULL camera device", __func__);
8381        return -ENODEV;
8382    }
8383
8384    int rc = hw->initialize(callback_ops);
8385    CDBG("%s: X", __func__);
8386    return rc;
8387}
8388
8389/*===========================================================================
8390 * FUNCTION   : configure_streams
8391 *
8392 * DESCRIPTION:
8393 *
8394 * PARAMETERS :
8395 *
8396 *
8397 * RETURN     : Success: 0
8398 *              Failure: -EINVAL (if stream configuration is invalid)
8399 *                       -ENODEV (fatal error)
8400 *==========================================================================*/
8401
8402int QCamera3HardwareInterface::configure_streams(
8403        const struct camera3_device *device,
8404        camera3_stream_configuration_t *stream_list)
8405{
8406    CDBG("%s: E", __func__);
8407    QCamera3HardwareInterface *hw =
8408        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8409    if (!hw) {
8410        ALOGE("%s: NULL camera device", __func__);
8411        return -ENODEV;
8412    }
8413    int rc = hw->configureStreams(stream_list);
8414    CDBG("%s: X", __func__);
8415    return rc;
8416}
8417
8418/*===========================================================================
8419 * FUNCTION   : construct_default_request_settings
8420 *
8421 * DESCRIPTION: Configure a settings buffer to meet the required use case
8422 *
8423 * PARAMETERS :
8424 *
8425 *
8426 * RETURN     : Success: Return valid metadata
8427 *              Failure: Return NULL
8428 *==========================================================================*/
8429const camera_metadata_t* QCamera3HardwareInterface::
8430    construct_default_request_settings(const struct camera3_device *device,
8431                                        int type)
8432{
8433
8434    CDBG("%s: E", __func__);
8435    camera_metadata_t* fwk_metadata = NULL;
8436    QCamera3HardwareInterface *hw =
8437        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8438    if (!hw) {
8439        ALOGE("%s: NULL camera device", __func__);
8440        return NULL;
8441    }
8442
8443    fwk_metadata = hw->translateCapabilityToMetadata(type);
8444
8445    CDBG("%s: X", __func__);
8446    return fwk_metadata;
8447}
8448
8449/*===========================================================================
8450 * FUNCTION   : process_capture_request
8451 *
8452 * DESCRIPTION:
8453 *
8454 * PARAMETERS :
8455 *
8456 *
8457 * RETURN     :
8458 *==========================================================================*/
8459int QCamera3HardwareInterface::process_capture_request(
8460                    const struct camera3_device *device,
8461                    camera3_capture_request_t *request)
8462{
8463    CDBG("%s: E", __func__);
8464    QCamera3HardwareInterface *hw =
8465        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8466    if (!hw) {
8467        ALOGE("%s: NULL camera device", __func__);
8468        return -EINVAL;
8469    }
8470
8471    int rc = hw->processCaptureRequest(request);
8472    CDBG("%s: X", __func__);
8473    return rc;
8474}
8475
8476/*===========================================================================
8477 * FUNCTION   : dump
8478 *
8479 * DESCRIPTION:
8480 *
8481 * PARAMETERS :
8482 *
8483 *
8484 * RETURN     :
8485 *==========================================================================*/
8486
8487void QCamera3HardwareInterface::dump(
8488                const struct camera3_device *device, int fd)
8489{
8490    /* Log level property is read when "adb shell dumpsys media.camera" is
8491       called so that the log level can be controlled without restarting
8492       the media server */
8493    getLogLevel();
8494
8495    CDBG("%s: E", __func__);
8496    QCamera3HardwareInterface *hw =
8497        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8498    if (!hw) {
8499        ALOGE("%s: NULL camera device", __func__);
8500        return;
8501    }
8502
8503    hw->dump(fd);
8504    CDBG("%s: X", __func__);
8505    return;
8506}
8507
8508/*===========================================================================
8509 * FUNCTION   : flush
8510 *
8511 * DESCRIPTION:
8512 *
8513 * PARAMETERS :
8514 *
8515 *
8516 * RETURN     :
8517 *==========================================================================*/
8518
8519int QCamera3HardwareInterface::flush(
8520                const struct camera3_device *device)
8521{
8522    int rc;
8523    CDBG("%s: E", __func__);
8524    QCamera3HardwareInterface *hw =
8525        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8526    if (!hw) {
8527        ALOGE("%s: NULL camera device", __func__);
8528        return -EINVAL;
8529    }
8530
8531    rc = hw->flush();
8532    CDBG("%s: X", __func__);
8533    return rc;
8534}
8535
8536/*===========================================================================
8537 * FUNCTION   : close_camera_device
8538 *
8539 * DESCRIPTION:
8540 *
8541 * PARAMETERS :
8542 *
8543 *
8544 * RETURN     :
8545 *==========================================================================*/
8546int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8547{
8548    CDBG("%s: E", __func__);
8549    int ret = NO_ERROR;
8550    QCamera3HardwareInterface *hw =
8551        reinterpret_cast<QCamera3HardwareInterface *>(
8552            reinterpret_cast<camera3_device_t *>(device)->priv);
8553    if (!hw) {
8554        ALOGE("NULL camera device");
8555        return BAD_VALUE;
8556    }
8557    delete hw;
8558
8559    CDBG("%s: X", __func__);
8560    return ret;
8561}
8562
8563/*===========================================================================
8564 * FUNCTION   : getWaveletDenoiseProcessPlate
8565 *
8566 * DESCRIPTION: query wavelet denoise process plate
8567 *
8568 * PARAMETERS : None
8569 *
8570 * RETURN     : WNR prcocess plate value
8571 *==========================================================================*/
8572cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8573{
8574    char prop[PROPERTY_VALUE_MAX];
8575    memset(prop, 0, sizeof(prop));
8576    property_get("persist.denoise.process.plates", prop, "0");
8577    int processPlate = atoi(prop);
8578    switch(processPlate) {
8579    case 0:
8580        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8581    case 1:
8582        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8583    case 2:
8584        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8585    case 3:
8586        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8587    default:
8588        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8589    }
8590}
8591
8592
8593/*===========================================================================
8594 * FUNCTION   : getTemporalDenoiseProcessPlate
8595 *
8596 * DESCRIPTION: query temporal denoise process plate
8597 *
8598 * PARAMETERS : None
8599 *
8600 * RETURN     : TNR prcocess plate value
8601 *==========================================================================*/
8602cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
8603{
8604    char prop[PROPERTY_VALUE_MAX];
8605    memset(prop, 0, sizeof(prop));
8606    property_get("persist.tnr.process.plates", prop, "0");
8607    int processPlate = atoi(prop);
8608    switch(processPlate) {
8609    case 0:
8610        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8611    case 1:
8612        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8613    case 2:
8614        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8615    case 3:
8616        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8617    default:
8618        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8619    }
8620}
8621
8622
8623/*===========================================================================
8624 * FUNCTION   : extractSceneMode
8625 *
8626 * DESCRIPTION: Extract scene mode from frameworks set metadata
8627 *
8628 * PARAMETERS :
8629 *      @frame_settings: CameraMetadata reference
8630 *      @metaMode: ANDROID_CONTORL_MODE
8631 *      @hal_metadata: hal metadata structure
8632 *
8633 * RETURN     : None
8634 *==========================================================================*/
8635int32_t QCamera3HardwareInterface::extractSceneMode(
8636        const CameraMetadata &frame_settings, uint8_t metaMode,
8637        metadata_buffer_t *hal_metadata)
8638{
8639    int32_t rc = NO_ERROR;
8640
8641    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
8642        camera_metadata_ro_entry entry =
8643                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
8644        if (0 == entry.count)
8645            return rc;
8646
8647        uint8_t fwk_sceneMode = entry.data.u8[0];
8648
8649        int val = lookupHalName(SCENE_MODES_MAP,
8650                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
8651                fwk_sceneMode);
8652        if (NAME_NOT_FOUND != val) {
8653            uint8_t sceneMode = (uint8_t)val;
8654            CDBG("%s: sceneMode: %d", __func__, sceneMode);
8655            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8656                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8657                rc = BAD_VALUE;
8658            }
8659        }
8660    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
8661            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
8662        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
8663        CDBG("%s: sceneMode: %d", __func__, sceneMode);
8664        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8665                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8666            rc = BAD_VALUE;
8667        }
8668    }
8669    return rc;
8670}
8671
8672/*===========================================================================
8673 * FUNCTION   : needRotationReprocess
8674 *
8675 * DESCRIPTION: if rotation needs to be done by reprocess in pp
8676 *
8677 * PARAMETERS : none
8678 *
8679 * RETURN     : true: needed
8680 *              false: no need
8681 *==========================================================================*/
8682bool QCamera3HardwareInterface::needRotationReprocess()
8683{
8684    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
8685        // current rotation is not zero, and pp has the capability to process rotation
8686        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
8687        return true;
8688    }
8689
8690    return false;
8691}
8692
8693/*===========================================================================
8694 * FUNCTION   : needReprocess
8695 *
8696 * DESCRIPTION: if reprocess in needed
8697 *
8698 * PARAMETERS : none
8699 *
8700 * RETURN     : true: needed
8701 *              false: no need
8702 *==========================================================================*/
8703bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
8704{
8705    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
8706        // TODO: add for ZSL HDR later
8707        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
8708        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
8709            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
8710            return true;
8711        } else {
8712            CDBG_HIGH("%s: already post processed frame", __func__);
8713            return false;
8714        }
8715    }
8716    return needRotationReprocess();
8717}
8718
8719/*===========================================================================
8720 * FUNCTION   : needJpegRotation
8721 *
8722 * DESCRIPTION: if rotation from jpeg is needed
8723 *
8724 * PARAMETERS : none
8725 *
8726 * RETURN     : true: needed
8727 *              false: no need
8728 *==========================================================================*/
8729bool QCamera3HardwareInterface::needJpegRotation()
8730{
8731   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
8732    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
8733       CDBG("%s: Need Jpeg to do the rotation", __func__);
8734       return true;
8735    }
8736    return false;
8737}
8738
8739/*===========================================================================
8740 * FUNCTION   : addOfflineReprocChannel
8741 *
8742 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
8743 *              coming from input channel
8744 *
8745 * PARAMETERS :
8746 *   @config  : reprocess configuration
8747 *   @inputChHandle : pointer to the input (source) channel
8748 *
8749 *
8750 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8751 *==========================================================================*/
8752QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
8753        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
8754{
8755    int32_t rc = NO_ERROR;
8756    QCamera3ReprocessChannel *pChannel = NULL;
8757
8758    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
8759            mChannelHandle, mCameraHandle->ops, NULL, config.padding,
8760            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
8761    if (NULL == pChannel) {
8762        ALOGE("%s: no mem for reprocess channel", __func__);
8763        return NULL;
8764    }
8765
8766    rc = pChannel->initialize(IS_TYPE_NONE);
8767    if (rc != NO_ERROR) {
8768        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
8769        delete pChannel;
8770        return NULL;
8771    }
8772
8773    // pp feature config
8774    cam_pp_feature_config_t pp_config;
8775    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
8776
8777    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
8778
8779    rc = pChannel->addReprocStreamsFromSource(pp_config,
8780            config,
8781            IS_TYPE_NONE,
8782            mMetadataChannel);
8783
8784    if (rc != NO_ERROR) {
8785        delete pChannel;
8786        return NULL;
8787    }
8788    return pChannel;
8789}
8790
8791/*===========================================================================
8792 * FUNCTION   : getMobicatMask
8793 *
8794 * DESCRIPTION: returns mobicat mask
8795 *
8796 * PARAMETERS : none
8797 *
8798 * RETURN     : mobicat mask
8799 *
8800 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Accessor for the mobicat mask cached by setMobicat().
    return m_MobicatMask;
}
8805
8806/*===========================================================================
8807 * FUNCTION   : setMobicat
8808 *
8809 * DESCRIPTION: set Mobicat on/off.
8810 *
8811 * PARAMETERS :
8812 *   @params  : none
8813 *
8814 * RETURN     : int32_t type of status
8815 *              NO_ERROR  -- success
8816 *              none-zero failure code
8817 *==========================================================================*/
8818int32_t QCamera3HardwareInterface::setMobicat()
8819{
8820    char value [PROPERTY_VALUE_MAX];
8821    property_get("persist.camera.mobicat", value, "0");
8822    int32_t ret = NO_ERROR;
8823    uint8_t enableMobi = (uint8_t)atoi(value);
8824
8825    if (enableMobi) {
8826        tune_cmd_t tune_cmd;
8827        tune_cmd.type = SET_RELOAD_CHROMATIX;
8828        tune_cmd.module = MODULE_ALL;
8829        tune_cmd.value = TRUE;
8830        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8831                CAM_INTF_PARM_SET_VFE_COMMAND,
8832                tune_cmd);
8833
8834        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8835                CAM_INTF_PARM_SET_PP_COMMAND,
8836                tune_cmd);
8837    }
8838    m_MobicatMask = enableMobi;
8839
8840    return ret;
8841}
8842
8843/*===========================================================================
8844* FUNCTION   : getLogLevel
8845*
8846* DESCRIPTION: Reads the log level property into a variable
8847*
8848* PARAMETERS :
8849*   None
8850*
8851* RETURN     :
8852*   None
8853*==========================================================================*/
8854void QCamera3HardwareInterface::getLogLevel()
8855{
8856    char prop[PROPERTY_VALUE_MAX];
8857    uint32_t globalLogLevel = 0;
8858
8859    property_get("persist.camera.hal.debug", prop, "0");
8860    int val = atoi(prop);
8861    if (0 <= val) {
8862        gCamHal3LogLevel = (uint32_t)val;
8863    }
8864    property_get("persist.camera.global.debug", prop, "0");
8865    val = atoi(prop);
8866    if (0 <= val) {
8867        globalLogLevel = (uint32_t)val;
8868    }
8869
8870    /* Highest log level among hal.logs and global.logs is selected */
8871    if (gCamHal3LogLevel < globalLogLevel)
8872        gCamHal3LogLevel = globalLogLevel;
8873
8874    return;
8875}
8876
8877/*===========================================================================
8878 * FUNCTION   : validateStreamRotations
8879 *
8880 * DESCRIPTION: Check if the rotations requested are supported
8881 *
8882 * PARAMETERS :
8883 *   @stream_list : streams to be configured
8884 *
8885 * RETURN     : NO_ERROR on success
8886 *              -EINVAL on failure
8887 *
8888 *==========================================================================*/
8889int QCamera3HardwareInterface::validateStreamRotations(
8890        camera3_stream_configuration_t *streamList)
8891{
8892    int rc = NO_ERROR;
8893
8894    /*
8895    * Loop through all streams requested in configuration
8896    * Check if unsupported rotations have been requested on any of them
8897    */
8898    for (size_t j = 0; j < streamList->num_streams; j++){
8899        camera3_stream_t *newStream = streamList->streams[j];
8900
8901        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
8902        bool isImplDef = (newStream->format ==
8903                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
8904        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
8905                isImplDef);
8906
8907        if (isRotated && (!isImplDef || isZsl)) {
8908            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
8909                    "type:%d and stream format:%d", __func__,
8910                    newStream->rotation, newStream->stream_type,
8911                    newStream->format);
8912            rc = -EINVAL;
8913            break;
8914        }
8915    }
8916    return rc;
8917}
8918
8919/*===========================================================================
8920* FUNCTION   : getFlashInfo
8921*
8922* DESCRIPTION: Retrieve information about whether the device has a flash.
8923*
8924* PARAMETERS :
8925*   @cameraId  : Camera id to query
8926*   @hasFlash  : Boolean indicating whether there is a flash device
8927*                associated with given camera
8928*   @flashNode : If a flash device exists, this will be its device node.
8929*
8930* RETURN     :
8931*   None
8932*==========================================================================*/
8933void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
8934        bool& hasFlash,
8935        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
8936{
8937    cam_capability_t* camCapability = gCamCapability[cameraId];
8938    if (NULL == camCapability) {
8939        hasFlash = false;
8940        flashNode[0] = '\0';
8941    } else {
8942        hasFlash = camCapability->flash_available;
8943        strlcpy(flashNode,
8944                (char*)camCapability->flash_dev_name,
8945                QCAMERA_MAX_FILEPATH_LENGTH);
8946    }
8947}
8948
8949/*===========================================================================
8950* FUNCTION   : getEepromVersionInfo
8951*
8952* DESCRIPTION: Retrieve version info of the sensor EEPROM data
8953*
8954* PARAMETERS : None
8955*
8956* RETURN     : string describing EEPROM version
8957*              "\0" if no such info available
8958*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Points into the static capability table; an empty string ("\0")
    // when no EEPROM version info is available.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
8963
8964/*===========================================================================
8965* FUNCTION   : getLdafCalib
8966*
8967* DESCRIPTION: Retrieve Laser AF calibration data
8968*
8969* PARAMETERS : None
8970*
8971* RETURN     : Two uint32_t describing laser AF calibration data
8972*              NULL if none is available.
8973*==========================================================================*/
8974const uint32_t *QCamera3HardwareInterface::getLdafCalib()
8975{
8976    if (mLdafCalibExist) {
8977        return &mLdafCalib[0];
8978    } else {
8979        return NULL;
8980    }
8981}
8982
8983/*===========================================================================
8984 * FUNCTION   : dynamicUpdateMetaStreamInfo
8985 *
8986 * DESCRIPTION: This function:
8987 *             (1) stops all the channels
8988 *             (2) returns error on pending requests and buffers
8989 *             (3) sends metastream_info in setparams
8990 *             (4) starts all channels
8991 *             This is useful when sensor has to be restarted to apply any
8992 *             settings such as frame rate from a different sensor mode
8993 *
8994 * PARAMETERS : None
8995 *
8996 * RETURN     : NO_ERROR on success
8997 *              Error codes on failure
8998 *
8999 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Stream-off everything before touching the sensor/ISP configuration.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Flush in-flight work: all pending requests/buffers are returned to the
    // framework with error status.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // NOTE(review): a set_parms failure is logged but not returned;
        // channels are still restarted below (with the old sensor mode).
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // Stream everything back on.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
9039
9040/*===========================================================================
9041 * FUNCTION   : stopAllChannels
9042 *
9043 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9044 *
9045 * PARAMETERS : None
9046 *
9047 * RETURN     : NO_ERROR on success
9048 *              Error codes on failure
9049 *
9050 *==========================================================================*/
9051int32_t QCamera3HardwareInterface::stopAllChannels()
9052{
9053    int32_t rc = NO_ERROR;
9054
9055    // Stop the Streams/Channels
9056    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9057        it != mStreamInfo.end(); it++) {
9058        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9059        channel->stop();
9060        (*it)->status = INVALID;
9061    }
9062
9063    if (mSupportChannel) {
9064        mSupportChannel->stop();
9065    }
9066    if (mAnalysisChannel) {
9067        mAnalysisChannel->stop();
9068    }
9069    if (mRawDumpChannel) {
9070        mRawDumpChannel->stop();
9071    }
9072    if (mMetadataChannel) {
9073        /* If content of mStreamInfo is not 0, there is metadata stream */
9074        mMetadataChannel->stop();
9075    }
9076
9077    CDBG("%s:%d All channels stopped", __func__, __LINE__);
9078    return rc;
9079}
9080
9081/*===========================================================================
9082 * FUNCTION   : startAllChannels
9083 *
9084 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9085 *
9086 * PARAMETERS : None
9087 *
9088 * RETURN     : NO_ERROR on success
9089 *              Error codes on failure
9090 *
9091 *==========================================================================*/
9092int32_t QCamera3HardwareInterface::startAllChannels()
9093{
9094    int32_t rc = NO_ERROR;
9095
9096    CDBG("%s: Start all channels ", __func__);
9097    // Start the Streams/Channels
9098    if (mMetadataChannel) {
9099        /* If content of mStreamInfo is not 0, there is metadata stream */
9100        rc = mMetadataChannel->start();
9101        if (rc < 0) {
9102            ALOGE("%s: META channel start failed", __func__);
9103            return rc;
9104        }
9105    }
9106    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9107        it != mStreamInfo.end(); it++) {
9108        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9109        rc = channel->start();
9110        if (rc < 0) {
9111            ALOGE("%s: channel start failed", __func__);
9112            return rc;
9113        }
9114    }
9115    if (mAnalysisChannel) {
9116        mAnalysisChannel->start();
9117    }
9118    if (mSupportChannel) {
9119        rc = mSupportChannel->start();
9120        if (rc < 0) {
9121            ALOGE("%s: Support channel start failed", __func__);
9122            return rc;
9123        }
9124    }
9125    if (mRawDumpChannel) {
9126        rc = mRawDumpChannel->start();
9127        if (rc < 0) {
9128            ALOGE("%s: RAW dump channel start failed", __func__);
9129            return rc;
9130        }
9131    }
9132
9133    CDBG("%s:%d All channels started", __func__, __LINE__);
9134    return rc;
9135}
9136
9137/*===========================================================================
9138 * FUNCTION   : notifyErrorForPendingRequests
9139 *
9140 * DESCRIPTION: This function sends error for all the pending requests/buffers
9141 *
9142 * PARAMETERS : None
9143 *
9144 * RETURN     : Error codes
9145 *              NO_ERROR on success
9146 *
9147 *==========================================================================*/
9148int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
9149{
9150    int32_t rc = NO_ERROR;
9151    unsigned int frameNum = 0;
9152    camera3_capture_result_t result;
9153    camera3_stream_buffer_t *pStream_Buf = NULL;
9154    FlushMap flushMap;
9155
9156    memset(&result, 0, sizeof(camera3_capture_result_t));
9157
9158    if (mPendingRequestsList.size() > 0) {
9159        pendingRequestIterator i = mPendingRequestsList.begin();
9160        frameNum = i->frame_number;
9161    } else {
9162        /* There might still be pending buffers even though there are
9163         no pending requests. Setting the frameNum to MAX so that
9164         all the buffers with smaller frame numbers are returned */
9165        frameNum = UINT_MAX;
9166    }
9167
9168    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
9169      __func__, frameNum);
9170
9171    // Go through the pending buffers and group them depending
9172    // on frame number
9173    for (List<PendingBufferInfo>::iterator k =
9174            mPendingBuffersMap.mPendingBufferList.begin();
9175            k != mPendingBuffersMap.mPendingBufferList.end();) {
9176
9177        if (k->frame_number < frameNum) {
9178            ssize_t idx = flushMap.indexOfKey(k->frame_number);
9179            if (idx == NAME_NOT_FOUND) {
9180                Vector<PendingBufferInfo> pending;
9181                pending.add(*k);
9182                flushMap.add(k->frame_number, pending);
9183            } else {
9184                Vector<PendingBufferInfo> &pending =
9185                        flushMap.editValueFor(k->frame_number);
9186                pending.add(*k);
9187            }
9188
9189            mPendingBuffersMap.num_buffers--;
9190            k = mPendingBuffersMap.mPendingBufferList.erase(k);
9191        } else {
9192            k++;
9193        }
9194    }
9195
9196    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
9197        uint32_t frame_number = flushMap.keyAt(iFlush);
9198        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
9199
9200        // Send Error notify to frameworks for each buffer for which
9201        // metadata buffer is already sent
9202        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
9203          __func__, frame_number, pending.size());
9204
9205        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
9206        if (NULL == pStream_Buf) {
9207            ALOGE("%s: No memory for pending buffers array", __func__);
9208            return NO_MEMORY;
9209        }
9210        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
9211
9212        for (size_t j = 0; j < pending.size(); j++) {
9213            const PendingBufferInfo &info = pending.itemAt(j);
9214            camera3_notify_msg_t notify_msg;
9215            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
9216            notify_msg.type = CAMERA3_MSG_ERROR;
9217            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
9218            notify_msg.message.error.error_stream = info.stream;
9219            notify_msg.message.error.frame_number = frame_number;
9220            pStream_Buf[j].acquire_fence = -1;
9221            pStream_Buf[j].release_fence = -1;
9222            pStream_Buf[j].buffer = info.buffer;
9223            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
9224            pStream_Buf[j].stream = info.stream;
9225            mCallbackOps->notify(mCallbackOps, &notify_msg);
9226            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
9227                    frame_number, info.stream);
9228        }
9229
9230        result.result = NULL;
9231        result.frame_number = frame_number;
9232        result.num_output_buffers = (uint32_t)pending.size();
9233        result.output_buffers = pStream_Buf;
9234        mCallbackOps->process_capture_result(mCallbackOps, &result);
9235
9236        delete [] pStream_Buf;
9237    }
9238
9239    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);
9240
9241    flushMap.clear();
9242    for (List<PendingBufferInfo>::iterator k =
9243            mPendingBuffersMap.mPendingBufferList.begin();
9244            k != mPendingBuffersMap.mPendingBufferList.end();) {
9245        ssize_t idx = flushMap.indexOfKey(k->frame_number);
9246        if (idx == NAME_NOT_FOUND) {
9247            Vector<PendingBufferInfo> pending;
9248            pending.add(*k);
9249            flushMap.add(k->frame_number, pending);
9250        } else {
9251            Vector<PendingBufferInfo> &pending =
9252                    flushMap.editValueFor(k->frame_number);
9253            pending.add(*k);
9254        }
9255
9256        mPendingBuffersMap.num_buffers--;
9257        k = mPendingBuffersMap.mPendingBufferList.erase(k);
9258    }
9259
9260    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
9261
9262    // Go through the pending requests info and send error request to framework
9263    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
9264        uint32_t frame_number = flushMap.keyAt(iFlush);
9265        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
9266        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
9267              __func__, frame_number);
9268
9269        // Send shutter notify to frameworks
9270        camera3_notify_msg_t notify_msg;
9271        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
9272        notify_msg.type = CAMERA3_MSG_ERROR;
9273        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
9274        notify_msg.message.error.error_stream = NULL;
9275        notify_msg.message.error.frame_number = frame_number;
9276        mCallbackOps->notify(mCallbackOps, &notify_msg);
9277
9278        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
9279        if (NULL == pStream_Buf) {
9280            ALOGE("%s: No memory for pending buffers array", __func__);
9281            return NO_MEMORY;
9282        }
9283        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
9284
9285        for (size_t j = 0; j < pending.size(); j++) {
9286            const PendingBufferInfo &info = pending.itemAt(j);
9287            pStream_Buf[j].acquire_fence = -1;
9288            pStream_Buf[j].release_fence = -1;
9289            pStream_Buf[j].buffer = info.buffer;
9290            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
9291            pStream_Buf[j].stream = info.stream;
9292        }
9293
9294        result.input_buffer = i->input_buffer;
9295        result.num_output_buffers = (uint32_t)pending.size();
9296        result.output_buffers = pStream_Buf;
9297        result.result = NULL;
9298        result.frame_number = frame_number;
9299        mCallbackOps->process_capture_result(mCallbackOps, &result);
9300        delete [] pStream_Buf;
9301        i = erasePendingRequest(i);
9302    }
9303
9304    /* Reset pending frame Drop list and requests list */
9305    mPendingFrameDropList.clear();
9306
9307    flushMap.clear();
9308    mPendingBuffersMap.num_buffers = 0;
9309    mPendingBuffersMap.mPendingBufferList.clear();
9310    mPendingReprocessResultList.clear();
9311    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);
9312
9313    return rc;
9314}
9315
9316bool QCamera3HardwareInterface::isOnEncoder(
9317        const cam_dimension_t max_viewfinder_size,
9318        uint32_t width, uint32_t height)
9319{
9320    return (width > (uint32_t)max_viewfinder_size.width ||
9321            height > (uint32_t)max_viewfinder_size.height);
9322}
9323
9324/*===========================================================================
9325 * FUNCTION   : setBundleInfo
9326 *
9327 * DESCRIPTION: Set bundle info for all streams that are bundle.
9328 *
9329 * PARAMETERS : None
9330 *
9331 * RETURN     : NO_ERROR on success
9332 *              Error codes on failure
9333 *==========================================================================*/
9334int32_t QCamera3HardwareInterface::setBundleInfo()
9335{
9336    int32_t rc = NO_ERROR;
9337
9338    if (mChannelHandle) {
9339        cam_bundle_config_t bundleInfo;
9340        memset(&bundleInfo, 0, sizeof(bundleInfo));
9341        rc = mCameraHandle->ops->get_bundle_info(
9342                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9343        if (rc != NO_ERROR) {
9344            ALOGE("%s: get_bundle_info failed", __func__);
9345            return rc;
9346        }
9347        if (mAnalysisChannel) {
9348            mAnalysisChannel->setBundleInfo(bundleInfo);
9349        }
9350        if (mSupportChannel) {
9351            mSupportChannel->setBundleInfo(bundleInfo);
9352        }
9353        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9354                it != mStreamInfo.end(); it++) {
9355            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9356            channel->setBundleInfo(bundleInfo);
9357        }
9358        if (mRawDumpChannel) {
9359            mRawDumpChannel->setBundleInfo(bundleInfo);
9360        }
9361    }
9362
9363    return rc;
9364}
9365
9366}; //end namespace qcamera
9367