QCamera3HWI.cpp revision 73640de4bcd72e53708202d0fa84c94936dbe1da
1/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <sync/sync.h>
46#include <gralloc_priv.h>
47#include "util/QCameraFlash.h"
48#include "QCamera3HWI.h"
49#include "QCamera3Mem.h"
50#include "QCamera3Channel.h"
51#include "QCamera3PostProc.h"
52#include "QCamera3VendorTags.h"
53
54using namespace android;
55
56namespace qcamera {
57
// Convenience accessor for a buffer pointer inside a QCamera3Mem-style object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline/result bookkeeping constants reported to the framework.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum pixel values for the supported raw bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which electronic image stabilization is applied.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream-count limits enforced by this HAL.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 values per metering region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds

// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features this HAL may request from the backend.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )

// Sentinel meaning "wait indefinitely" for timeout parameters.
#define TIMEOUT_NEVER -1
97
// Per-sensor capability tables filled in when each camera is probed.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata blobs, one per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Guards process-wide camera state shared across HAL instances.
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
// HAL3 log verbosity; volatile because it may be updated at runtime via property.
volatile uint32_t gCamHal3LogLevel = 1;
102
// Property-string to backend CDS (chroma denoise) mode mapping.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Android effect-mode enum -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android auto-white-balance mode -> backend white balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene mode -> backend scene mode (note STEADYPHOTO maps to ANTISHAKE).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android AF mode -> backend focus mode. AF_MODE_OFF appears twice on purpose:
// both OFF and FIXED backend modes report as OFF to the framework.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android chromatic-aberration-correction mode -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding mode -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE mode -> flash mode implied by that AE mode (ON/OFF both disable flash).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android explicit flash mode -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect mode -> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus-distance-calibration enum -> backend calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens state -> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// JPEG thumbnail sizes advertised to the framework, as flat (width, height)
// pairs; the leading 0,0 entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Android sensor test-pattern mode -> backend test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested FPS -> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
289
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
// register_stream_buffers and get_metadata_vendor_tag_ops are NULL because
// they are unused in the HAL3.2+ device API this HAL implements (v3.3).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
301
302/*===========================================================================
303 * FUNCTION   : QCamera3HardwareInterface
304 *
305 * DESCRIPTION: constructor of QCamera3HardwareInterface
306 *
307 * PARAMETERS :
308 *   @cameraId  : camera ID
309 *
310 * RETURN     : none
311 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Fill in the camera3_device_t the framework will use to talk to us.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): gCamCapability[cameraId] is assumed to be populated by the
    // module layer before this constructor runs; no NULL check here.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;
    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; they are created lazily.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview/video, default enabled.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);
}
393
394/*===========================================================================
395 * FUNCTION   : ~QCamera3HardwareInterface
396 *
397 * DESCRIPTION: destructor of QCamera3HardwareInterface
398 *
399 * PARAMETERS : none
400 *
401 * RETURN     : none
402 *==========================================================================*/
403QCamera3HardwareInterface::~QCamera3HardwareInterface()
404{
405    CDBG("%s: E", __func__);
406
407    /* Turn off current power hint before acquiring perfLock in case they
408     * conflict with each other */
409    disablePowerHint();
410
411    m_perfLock.lock_acq();
412
413    /* We need to stop all streams before deleting any stream */
414    if (mRawDumpChannel) {
415        mRawDumpChannel->stop();
416    }
417
418    // NOTE: 'camera3_stream_t *' objects are already freed at
419    //        this stage by the framework
420    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
421        it != mStreamInfo.end(); it++) {
422        QCamera3ProcessingChannel *channel = (*it)->channel;
423        if (channel) {
424            channel->stop();
425        }
426    }
427    if (mSupportChannel)
428        mSupportChannel->stop();
429
430    if (mAnalysisChannel) {
431        mAnalysisChannel->stop();
432    }
433    if (mMetadataChannel) {
434        mMetadataChannel->stop();
435    }
436    if (mChannelHandle) {
437        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
438                mChannelHandle);
439        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
440    }
441
442    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
443        it != mStreamInfo.end(); it++) {
444        QCamera3ProcessingChannel *channel = (*it)->channel;
445        if (channel)
446            delete channel;
447        free (*it);
448    }
449    if (mSupportChannel) {
450        delete mSupportChannel;
451        mSupportChannel = NULL;
452    }
453
454    if (mAnalysisChannel) {
455        delete mAnalysisChannel;
456        mAnalysisChannel = NULL;
457    }
458    if (mRawDumpChannel) {
459        delete mRawDumpChannel;
460        mRawDumpChannel = NULL;
461    }
462    if (mDummyBatchChannel) {
463        delete mDummyBatchChannel;
464        mDummyBatchChannel = NULL;
465    }
466    mPictureChannel = NULL;
467
468    if (mMetadataChannel) {
469        delete mMetadataChannel;
470        mMetadataChannel = NULL;
471    }
472
473    /* Clean up all channels */
474    if (mCameraInitialized) {
475        if(!mFirstConfiguration){
476            //send the last unconfigure
477            cam_stream_size_info_t stream_config_info;
478            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
479            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
480            stream_config_info.buffer_info.max_buffers =
481                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
482            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
483                    stream_config_info);
484            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
485            if (rc < 0) {
486                ALOGE("%s: set_parms failed for unconfigure", __func__);
487            }
488        }
489        deinitParameters();
490    }
491
492    if (mChannelHandle) {
493        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
494                mChannelHandle);
495        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
496        mChannelHandle = 0;
497    }
498
499    if (mCameraOpened)
500        closeCamera();
501
502    mPendingBuffersMap.mPendingBufferList.clear();
503    mPendingReprocessResultList.clear();
504    for (pendingRequestIterator i = mPendingRequestsList.begin();
505            i != mPendingRequestsList.end();) {
506        i = erasePendingRequest(i);
507    }
508    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
509        if (mDefaultMetadata[i])
510            free_camera_metadata(mDefaultMetadata[i]);
511
512    m_perfLock.lock_rel();
513    m_perfLock.lock_deinit();
514
515    pthread_cond_destroy(&mRequestCond);
516
517    pthread_mutex_destroy(&mMutex);
518    CDBG("%s: X", __func__);
519}
520
521/*===========================================================================
522 * FUNCTION   : erasePendingRequest
523 *
524 * DESCRIPTION: function to erase a desired pending request after freeing any
525 *              allocated memory
526 *
527 * PARAMETERS :
528 *   @i       : iterator pointing to pending request to be erased
529 *
530 * RETURN     : iterator pointing to the next request
531 *==========================================================================*/
532QCamera3HardwareInterface::pendingRequestIterator
533        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
534{
535    if (i->input_buffer != NULL) {
536        free(i->input_buffer);
537        i->input_buffer = NULL;
538    }
539    if (i->settings != NULL)
540        free_camera_metadata((camera_metadata_t*)i->settings);
541    return mPendingRequestsList.erase(i);
542}
543
544/*===========================================================================
545 * FUNCTION   : camEvtHandle
546 *
547 * DESCRIPTION: Function registered to mm-camera-interface to handle events
548 *
549 * PARAMETERS :
550 *   @camera_handle : interface layer camera handle
551 *   @evt           : ptr to event
552 *   @user_data     : user data ptr
553 *
554 * RETURN     : none
555 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
                                          mm_camera_event_t *evt,
                                          void *user_data)
{
    // user_data is the HAL instance registered via register_event_notify.
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                ALOGE("%s: Fatal, camera daemon died", __func__);
                //close the camera backend
                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
                        && obj->mCameraHandle->ops) {
                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
                } else {
                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
                            __func__);
                }
                // Report a fatal device error to the framework so it tears
                // down this camera device.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = 0;
                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                CDBG("%s: HAL got request pull from Daemon", __func__);
                // Daemon is ready for more requests; wake any thread blocked
                // in process_capture_request.
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
                        evt->server_event_type);
                break;
        }
    } else {
        ALOGE("%s: NULL user_data/evt", __func__);
    }
}
599
600/*===========================================================================
601 * FUNCTION   : openCamera
602 *
603 * DESCRIPTION: open camera
604 *
605 * PARAMETERS :
606 *   @hw_device  : double ptr for camera device struct
607 *
608 * RETURN     : int32_t type of status
609 *              NO_ERROR  -- success
610 *              none-zero failure code
611 *==========================================================================*/
612int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
613{
614    int rc = 0;
615    if (mCameraOpened) {
616        *hw_device = NULL;
617        return PERMISSION_DENIED;
618    }
619    m_perfLock.lock_acq();
620    rc = openCamera();
621    if (rc == 0) {
622        *hw_device = &mCameraDevice.common;
623    } else
624        *hw_device = NULL;
625
626    m_perfLock.lock_rel();
627    return rc;
628}
629
630/*===========================================================================
631 * FUNCTION   : openCamera
632 *
633 * DESCRIPTION: open camera
634 *
635 * PARAMETERS : none
636 *
637 * RETURN     : int32_t type of status
638 *              NO_ERROR  -- success
639 *              none-zero failure code
640 *==========================================================================*/
641int QCamera3HardwareInterface::openCamera()
642{
643    int rc = 0;
644
645    ATRACE_CALL();
646    if (mCameraHandle) {
647        ALOGE("Failure: Camera already opened");
648        return ALREADY_EXISTS;
649    }
650
651    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
652    if (rc < 0) {
653        ALOGE("%s: Failed to reserve flash for camera id: %d",
654                __func__,
655                mCameraId);
656        return UNKNOWN_ERROR;
657    }
658
659    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
660    if (rc) {
661        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
662        return rc;
663    }
664
665    mCameraOpened = true;
666
667    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
668            camEvtHandle, (void *)this);
669
670    if (rc < 0) {
671        ALOGE("%s: Error, failed to register event callback", __func__);
672        /* Not closing camera here since it is already handled in destructor */
673        return FAILED_TRANSACTION;
674    }
675    mFirstConfiguration = true;
676    return NO_ERROR;
677}
678
679/*===========================================================================
680 * FUNCTION   : closeCamera
681 *
682 * DESCRIPTION: close camera
683 *
684 * PARAMETERS : none
685 *
686 * RETURN     : int32_t type of status
687 *              NO_ERROR  -- success
688 *              none-zero failure code
689 *==========================================================================*/
690int QCamera3HardwareInterface::closeCamera()
691{
692    ATRACE_CALL();
693    int rc = NO_ERROR;
694
695    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
696    mCameraHandle = NULL;
697    mCameraOpened = false;
698
699    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
700        CDBG("%s: Failed to release flash for camera id: %d",
701                __func__,
702                mCameraId);
703    }
704
705    return rc;
706}
707
708/*===========================================================================
709 * FUNCTION   : initialize
710 *
711 * DESCRIPTION: Initialize frameworks callback functions
712 *
713 * PARAMETERS :
714 *   @callback_ops : callback function to frameworks
715 *
716 * RETURN     :
717 *
718 *==========================================================================*/
719int QCamera3HardwareInterface::initialize(
720        const struct camera3_callback_ops *callback_ops)
721{
722    ATRACE_CALL();
723    int rc;
724
725    pthread_mutex_lock(&mMutex);
726
727    rc = initParameters();
728    if (rc < 0) {
729        ALOGE("%s: initParamters failed %d", __func__, rc);
730       goto err1;
731    }
732    mCallbackOps = callback_ops;
733
734    mChannelHandle = mCameraHandle->ops->add_channel(
735            mCameraHandle->camera_handle, NULL, NULL, this);
736    if (mChannelHandle == 0) {
737        ALOGE("%s: add_channel failed", __func__);
738        rc = -ENOMEM;
739        pthread_mutex_unlock(&mMutex);
740        return rc;
741    }
742
743    pthread_mutex_unlock(&mMutex);
744    mCameraInitialized = true;
745    return 0;
746
747err1:
748    pthread_mutex_unlock(&mMutex);
749    return rc;
750}
751
752/*===========================================================================
753 * FUNCTION   : validateStreamDimensions
754 *
755 * DESCRIPTION: Check if the configuration requested are those advertised
756 *
757 * PARAMETERS :
758 *   @stream_list : streams to be configured
759 *
760 * RETURN     :
761 *
762 *==========================================================================*/
763int QCamera3HardwareInterface::validateStreamDimensions(
764        camera3_stream_configuration_t *streamList)
765{
766    int rc = NO_ERROR;
767    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
768    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
769    size_t count = 0;
770
771    camera3_stream_t *inputStream = NULL;
772    /*
773    * Loop through all streams to find input stream if it exists*
774    */
775    for (size_t i = 0; i< streamList->num_streams; i++) {
776        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
777            if (inputStream != NULL) {
778                ALOGE("%s: Error, Multiple input streams requested");
779                return -EINVAL;
780            }
781            inputStream = streamList->streams[i];
782        }
783    }
784    /*
785    * Loop through all streams requested in configuration
786    * Check if unsupported sizes have been requested on any of them
787    */
788    for (size_t j = 0; j < streamList->num_streams; j++) {
789        bool sizeFound = false;
790        size_t jpeg_sizes_cnt = 0;
791        camera3_stream_t *newStream = streamList->streams[j];
792
793        uint32_t rotatedHeight = newStream->height;
794        uint32_t rotatedWidth = newStream->width;
795        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
796                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
797            rotatedHeight = newStream->width;
798            rotatedWidth = newStream->height;
799        }
800
801        /*
802        * Sizes are different for each type of stream format check against
803        * appropriate table.
804        */
805        switch (newStream->format) {
806        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
807        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
808        case HAL_PIXEL_FORMAT_RAW10:
809            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
810            for (size_t i = 0; i < count; i++) {
811                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
812                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
813                    sizeFound = true;
814                    break;
815                }
816            }
817            break;
818        case HAL_PIXEL_FORMAT_BLOB:
819            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
820            /* Generate JPEG sizes table */
821            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
822                    count,
823                    MAX_SIZES_CNT,
824                    available_processed_sizes);
825            jpeg_sizes_cnt = filterJpegSizes(
826                    available_jpeg_sizes,
827                    available_processed_sizes,
828                    count * 2,
829                    MAX_SIZES_CNT * 2,
830                    gCamCapability[mCameraId]->active_array_size,
831                    gCamCapability[mCameraId]->max_downscale_factor);
832
833            /* Verify set size against generated sizes table */
834            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
835                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
836                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
837                    sizeFound = true;
838                    break;
839                }
840            }
841            break;
842        case HAL_PIXEL_FORMAT_YCbCr_420_888:
843        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
844        default:
845            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
846                    || newStream->stream_type == CAMERA3_STREAM_INPUT
847                    || IS_USAGE_ZSL(newStream->usage)) {
848                if (((int32_t)rotatedWidth ==
849                                gCamCapability[mCameraId]->active_array_size.width) &&
850                                ((int32_t)rotatedHeight ==
851                                gCamCapability[mCameraId]->active_array_size.height)) {
852                    sizeFound = true;
853                    break;
854                }
855                /* We could potentially break here to enforce ZSL stream
856                 * set from frameworks always is full active array size
857                 * but it is not clear from the spc if framework will always
858                 * follow that, also we have logic to override to full array
859                 * size, so keeping the logic lenient at the moment
860                 */
861            }
862            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
863                    MAX_SIZES_CNT);
864            for (size_t i = 0; i < count; i++) {
865                if (((int32_t)rotatedWidth ==
866                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
867                            ((int32_t)rotatedHeight ==
868                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
869                    sizeFound = true;
870                    break;
871                }
872            }
873            break;
874        } /* End of switch(newStream->format) */
875
876        /* We error out even if a single stream has unsupported size set */
877        if (!sizeFound) {
878            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
879                  "type:%d", __func__, rotatedWidth, rotatedHeight,
880                  newStream->format);
881            ALOGE("%s: Active array size is  %d x %d", __func__,
882                    gCamCapability[mCameraId]->active_array_size.width,
883                    gCamCapability[mCameraId]->active_array_size.height);
884            rc = -EINVAL;
885            break;
886        }
887    } /* End of for each stream */
888    return rc;
889}
890
891/*==============================================================================
892 * FUNCTION   : isSupportChannelNeeded
893 *
894 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
895 *
896 * PARAMETERS :
897 *   @stream_list : streams to be configured
898 *   @stream_config_info : the config info for streams to be configured
899 *
 * RETURN     : Boolean true/false decision
901 *
902 *==========================================================================*/
903bool QCamera3HardwareInterface::isSupportChannelNeeded(
904        camera3_stream_configuration_t *streamList,
905        cam_stream_size_info_t stream_config_info)
906{
907    uint32_t i;
908    bool pprocRequested = false;
909    /* Check for conditions where PProc pipeline does not have any streams*/
910    for (i = 0; i < stream_config_info.num_streams; i++) {
911        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
912                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
913            pprocRequested = true;
914            break;
915        }
916    }
917
918    if (pprocRequested == false )
919        return true;
920
921    /* Dummy stream needed if only raw or jpeg streams present */
922    for (i = 0; i < streamList->num_streams; i++) {
923        switch(streamList->streams[i]->format) {
924            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
925            case HAL_PIXEL_FORMAT_RAW10:
926            case HAL_PIXEL_FORMAT_RAW16:
927            case HAL_PIXEL_FORMAT_BLOB:
928                break;
929            default:
930                return false;
931        }
932    }
933    return true;
934}
935
936/*==============================================================================
937 * FUNCTION   : getSensorOutputSize
938 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
940 *
941 * PARAMETERS :
942 *   @sensor_dim : sensor output dimension (output)
943 *
944 * RETURN     : int32_t type of status
945 *              NO_ERROR  -- success
946 *              none-zero failure code
947 *
948 *==========================================================================*/
949int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
950{
951    int32_t rc = NO_ERROR;
952
953    cam_dimension_t max_dim = {0, 0};
954    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
955        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
956            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
957        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
958            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
959    }
960
961    clear_metadata_buffer(mParameters);
962
963    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
964            max_dim);
965    if (rc != NO_ERROR) {
966        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
967        return rc;
968    }
969
970    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
971    if (rc != NO_ERROR) {
972        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
973        return rc;
974    }
975
976    clear_metadata_buffer(mParameters);
977    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
978
979    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
980            mParameters);
981    if (rc != NO_ERROR) {
982        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
983        return rc;
984    }
985
986    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
987    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
988
989    return rc;
990}
991
992/*==============================================================================
993 * FUNCTION   : enablePowerHint
994 *
995 * DESCRIPTION: enable single powerhint for preview and different video modes.
996 *
997 * PARAMETERS :
998 *
999 * RETURN     : NULL
1000 *
1001 *==========================================================================*/
1002void QCamera3HardwareInterface::enablePowerHint()
1003{
1004    if (!mPowerHintEnabled) {
1005        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1006        mPowerHintEnabled = true;
1007    }
1008}
1009
1010/*==============================================================================
1011 * FUNCTION   : disablePowerHint
1012 *
1013 * DESCRIPTION: disable current powerhint.
1014 *
1015 * PARAMETERS :
1016 *
1017 * RETURN     : NULL
1018 *
1019 *==========================================================================*/
1020void QCamera3HardwareInterface::disablePowerHint()
1021{
1022    if (mPowerHintEnabled) {
1023        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1024        mPowerHintEnabled = false;
1025    }
1026}
1027
1028/*===========================================================================
1029 * FUNCTION   : configureStreams
1030 *
1031 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1032 *              and output streams.
1033 *
1034 * PARAMETERS :
1035 *   @stream_list : streams to be configured
1036 *
1037 * RETURN     :
1038 *
1039 *==========================================================================*/
1040int QCamera3HardwareInterface::configureStreams(
1041        camera3_stream_configuration_t *streamList)
1042{
1043    ATRACE_CALL();
1044    int rc = 0;
1045
1046    // Acquire perfLock before configure streams
1047    m_perfLock.lock_acq();
1048    rc = configureStreamsPerfLocked(streamList);
1049    m_perfLock.lock_rel();
1050
1051    return rc;
1052}
1053
1054/*===========================================================================
1055 * FUNCTION   : configureStreamsPerfLocked
1056 *
1057 * DESCRIPTION: configureStreams while perfLock is held.
1058 *
1059 * PARAMETERS :
1060 *   @stream_list : streams to be configured
1061 *
1062 * RETURN     : int32_t type of status
1063 *              NO_ERROR  -- success
1064 *              none-zero failure code
1065 *==========================================================================*/
1066int QCamera3HardwareInterface::configureStreamsPerfLocked(
1067        camera3_stream_configuration_t *streamList)
1068{
1069    ATRACE_CALL();
1070    int rc = 0;
1071
1072    // Sanity check stream_list
1073    if (streamList == NULL) {
1074        ALOGE("%s: NULL stream configuration", __func__);
1075        return BAD_VALUE;
1076    }
1077    if (streamList->streams == NULL) {
1078        ALOGE("%s: NULL stream list", __func__);
1079        return BAD_VALUE;
1080    }
1081
1082    if (streamList->num_streams < 1) {
1083        ALOGE("%s: Bad number of streams requested: %d", __func__,
1084                streamList->num_streams);
1085        return BAD_VALUE;
1086    }
1087
1088    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1089        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1090                MAX_NUM_STREAMS, streamList->num_streams);
1091        return BAD_VALUE;
1092    }
1093
1094    mOpMode = streamList->operation_mode;
1095    CDBG("%s: mOpMode: %d", __func__, mOpMode);
1096
1097    /* first invalidate all the steams in the mStreamList
1098     * if they appear again, they will be validated */
1099    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1100            it != mStreamInfo.end(); it++) {
1101        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1102        channel->stop();
1103        (*it)->status = INVALID;
1104    }
1105
1106    if (mRawDumpChannel) {
1107        mRawDumpChannel->stop();
1108        delete mRawDumpChannel;
1109        mRawDumpChannel = NULL;
1110    }
1111
1112    if (mSupportChannel)
1113        mSupportChannel->stop();
1114
1115    if (mAnalysisChannel) {
1116        mAnalysisChannel->stop();
1117    }
1118    if (mMetadataChannel) {
1119        /* If content of mStreamInfo is not 0, there is metadata stream */
1120        mMetadataChannel->stop();
1121    }
1122    if (mChannelHandle) {
1123        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1124                mChannelHandle);
1125        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1126    }
1127
1128    pthread_mutex_lock(&mMutex);
1129
1130    /* Check whether we have video stream */
1131    m_bIs4KVideo = false;
1132    m_bIsVideo = false;
1133    m_bEisSupportedSize = false;
1134    m_bTnrEnabled = false;
1135    bool isZsl = false;
1136    uint32_t videoWidth = 0U;
1137    uint32_t videoHeight = 0U;
1138    size_t rawStreamCnt = 0;
1139    size_t stallStreamCnt = 0;
1140    size_t processedStreamCnt = 0;
1141    // Number of streams on ISP encoder path
1142    size_t numStreamsOnEncoder = 0;
1143    size_t numYuv888OnEncoder = 0;
1144    bool bYuv888OverrideJpeg = false;
1145    cam_dimension_t largeYuv888Size = {0, 0};
1146    cam_dimension_t maxViewfinderSize = {0, 0};
1147    bool bJpegExceeds4K = false;
1148    bool bUseCommonFeatureMask = false;
1149    uint32_t commonFeatureMask = 0;
1150    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1151    camera3_stream_t *inputStream = NULL;
1152    bool isJpeg = false;
1153    cam_dimension_t jpegSize = {0, 0};
1154
1155    /*EIS configuration*/
1156    bool eisSupported = false;
1157    bool oisSupported = false;
1158    int32_t margin_index = -1;
1159    uint8_t eis_prop_set;
1160    uint32_t maxEisWidth = 0;
1161    uint32_t maxEisHeight = 0;
1162    int32_t hal_version = CAM_HAL_V3;
1163
1164    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1165
1166    size_t count = IS_TYPE_MAX;
1167    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1168    for (size_t i = 0; i < count; i++) {
1169        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1170            eisSupported = true;
1171            margin_index = (int32_t)i;
1172            break;
1173        }
1174    }
1175
1176    count = CAM_OPT_STAB_MAX;
1177    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1178    for (size_t i = 0; i < count; i++) {
1179        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1180            oisSupported = true;
1181            break;
1182        }
1183    }
1184
1185    if (eisSupported) {
1186        maxEisWidth = MAX_EIS_WIDTH;
1187        maxEisHeight = MAX_EIS_HEIGHT;
1188    }
1189
1190    /* EIS setprop control */
1191    char eis_prop[PROPERTY_VALUE_MAX];
1192    memset(eis_prop, 0, sizeof(eis_prop));
1193    property_get("persist.camera.eis.enable", eis_prop, "0");
1194    eis_prop_set = (uint8_t)atoi(eis_prop);
1195
1196    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1197            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1198
1199    /* stream configurations */
1200    for (size_t i = 0; i < streamList->num_streams; i++) {
1201        camera3_stream_t *newStream = streamList->streams[i];
1202        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1203                "height = %d, rotation = %d, usage = 0x%x",
1204                __func__, i, newStream->stream_type, newStream->format,
1205                newStream->width, newStream->height, newStream->rotation,
1206                newStream->usage);
1207        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1208                newStream->stream_type == CAMERA3_STREAM_INPUT){
1209            isZsl = true;
1210        }
1211        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1212            inputStream = newStream;
1213        }
1214
1215        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1216            isJpeg = true;
1217            jpegSize.width = newStream->width;
1218            jpegSize.height = newStream->height;
1219            if (newStream->width > VIDEO_4K_WIDTH ||
1220                    newStream->height > VIDEO_4K_HEIGHT)
1221                bJpegExceeds4K = true;
1222        }
1223
1224        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1225                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1226            m_bIsVideo = true;
1227            videoWidth = newStream->width;
1228            videoHeight = newStream->height;
1229            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1230                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1231                m_bIs4KVideo = true;
1232            }
1233            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1234                                  (newStream->height <= maxEisHeight);
1235        }
1236        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1237                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1238            switch (newStream->format) {
1239            case HAL_PIXEL_FORMAT_BLOB:
1240                stallStreamCnt++;
1241                if (isOnEncoder(maxViewfinderSize, newStream->width,
1242                        newStream->height)) {
1243                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1244                    numStreamsOnEncoder++;
1245                }
1246                break;
1247            case HAL_PIXEL_FORMAT_RAW10:
1248            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1249            case HAL_PIXEL_FORMAT_RAW16:
1250                rawStreamCnt++;
1251                break;
1252            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1253                processedStreamCnt++;
1254                if (isOnEncoder(maxViewfinderSize, newStream->width,
1255                        newStream->height)) {
1256                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1257                            IS_USAGE_ZSL(newStream->usage)) {
1258                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1259                    } else {
1260                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1261                    }
1262                    numStreamsOnEncoder++;
1263                }
1264                break;
1265            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1266                processedStreamCnt++;
1267                if (isOnEncoder(maxViewfinderSize, newStream->width,
1268                        newStream->height)) {
1269                    // If Yuv888 size is not greater than 4K, set feature mask
1270                    // to SUPERSET so that it support concurrent request on
1271                    // YUV and JPEG.
1272                    if (newStream->width <= VIDEO_4K_WIDTH &&
1273                            newStream->height <= VIDEO_4K_HEIGHT) {
1274                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1275                    } else {
1276                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1277                    }
1278                    numStreamsOnEncoder++;
1279                    numYuv888OnEncoder++;
1280                    largeYuv888Size.width = newStream->width;
1281                    largeYuv888Size.height = newStream->height;
1282                }
1283                break;
1284            default:
1285                processedStreamCnt++;
1286                if (isOnEncoder(maxViewfinderSize, newStream->width,
1287                        newStream->height)) {
1288                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1289                    numStreamsOnEncoder++;
1290                }
1291                break;
1292            }
1293
1294        }
1295    }
1296
1297    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1298        !m_bIsVideo) {
1299        m_bEisEnable = false;
1300    }
1301
1302    /* Logic to enable/disable TNR based on specific config size/etc.*/
1303    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1304            ((videoWidth == 1920 && videoHeight == 1080) ||
1305            (videoWidth == 1280 && videoHeight == 720)) &&
1306            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1307        m_bTnrEnabled = true;
1308
1309    /* Check if num_streams is sane */
1310    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1311            rawStreamCnt > MAX_RAW_STREAMS ||
1312            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1313        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1314                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1315        pthread_mutex_unlock(&mMutex);
1316        return -EINVAL;
1317    }
1318    /* Check whether we have zsl stream or 4k video case */
1319    if (isZsl && m_bIsVideo) {
1320        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1321        pthread_mutex_unlock(&mMutex);
1322        return -EINVAL;
1323    }
1324    /* Check if stream sizes are sane */
1325    if (numStreamsOnEncoder > 2) {
1326        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1327                __func__);
1328        pthread_mutex_unlock(&mMutex);
1329        return -EINVAL;
1330    } else if (1 < numStreamsOnEncoder){
1331        bUseCommonFeatureMask = true;
1332        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1333                __func__);
1334    }
1335
1336    /* Check if BLOB size is greater than 4k in 4k recording case */
1337    if (m_bIs4KVideo && bJpegExceeds4K) {
1338        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1339                __func__);
1340        pthread_mutex_unlock(&mMutex);
1341        return -EINVAL;
1342    }
1343
1344    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1345    // the YUV stream's size is greater or equal to the JPEG size, set common
1346    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1347    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1348            jpegSize.width, jpegSize.height) &&
1349            largeYuv888Size.width > jpegSize.width &&
1350            largeYuv888Size.height > jpegSize.height) {
1351        bYuv888OverrideJpeg = true;
1352    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1353        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1354    }
1355
1356    rc = validateStreamDimensions(streamList);
1357    if (rc == NO_ERROR) {
1358        rc = validateStreamRotations(streamList);
1359    }
1360    if (rc != NO_ERROR) {
1361        ALOGE("%s: Invalid stream configuration requested!", __func__);
1362        pthread_mutex_unlock(&mMutex);
1363        return rc;
1364    }
1365
1366    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1367    camera3_stream_t *jpegStream = NULL;
1368    for (size_t i = 0; i < streamList->num_streams; i++) {
1369        camera3_stream_t *newStream = streamList->streams[i];
1370        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1371                "stream size : %d x %d, stream rotation = %d",
1372                __func__, newStream->stream_type, newStream->format,
1373                newStream->width, newStream->height, newStream->rotation);
1374        //if the stream is in the mStreamList validate it
1375        bool stream_exists = false;
1376        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1377                it != mStreamInfo.end(); it++) {
1378            if ((*it)->stream == newStream) {
1379                QCamera3ProcessingChannel *channel =
1380                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1381                stream_exists = true;
1382                if (channel)
1383                    delete channel;
1384                (*it)->status = VALID;
1385                (*it)->stream->priv = NULL;
1386                (*it)->channel = NULL;
1387            }
1388        }
1389        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1390            //new stream
1391            stream_info_t* stream_info;
1392            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1393            if (!stream_info) {
1394               ALOGE("%s: Could not allocate stream info", __func__);
1395               rc = -ENOMEM;
1396               pthread_mutex_unlock(&mMutex);
1397               return rc;
1398            }
1399            stream_info->stream = newStream;
1400            stream_info->status = VALID;
1401            stream_info->channel = NULL;
1402            mStreamInfo.push_back(stream_info);
1403        }
1404        /* Covers Opaque ZSL and API1 F/W ZSL */
1405        if (IS_USAGE_ZSL(newStream->usage)
1406                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1407            if (zslStream != NULL) {
1408                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1409                pthread_mutex_unlock(&mMutex);
1410                return BAD_VALUE;
1411            }
1412            zslStream = newStream;
1413        }
1414        /* Covers YUV reprocess */
1415        if (inputStream != NULL) {
1416            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1417                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1418                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1419                    && inputStream->width == newStream->width
1420                    && inputStream->height == newStream->height) {
1421                if (zslStream != NULL) {
1422                    /* This scenario indicates multiple YUV streams with same size
1423                     * as input stream have been requested, since zsl stream handle
1424                     * is solely use for the purpose of overriding the size of streams
1425                     * which share h/w streams we will just make a guess here as to
1426                     * which of the stream is a ZSL stream, this will be refactored
1427                     * once we make generic logic for streams sharing encoder output
1428                     */
1429                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1430                }
1431                zslStream = newStream;
1432            }
1433        }
1434        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1435            jpegStream = newStream;
1436        }
1437    }
1438
1439    /* If a zsl stream is set, we know that we have configured at least one input or
1440       bidirectional stream */
1441    if (NULL != zslStream) {
1442        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1443        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1444        mInputStreamInfo.format = zslStream->format;
1445        mInputStreamInfo.usage = zslStream->usage;
1446        CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1447                __func__, mInputStreamInfo.dim.width,
1448                mInputStreamInfo.dim.height,
1449                mInputStreamInfo.format, mInputStreamInfo.usage);
1450    }
1451
1452    cleanAndSortStreamInfo();
1453    if (mMetadataChannel) {
1454        delete mMetadataChannel;
1455        mMetadataChannel = NULL;
1456    }
1457    if (mSupportChannel) {
1458        delete mSupportChannel;
1459        mSupportChannel = NULL;
1460    }
1461
1462    if (mAnalysisChannel) {
1463        delete mAnalysisChannel;
1464        mAnalysisChannel = NULL;
1465    }
1466
1467    if (mDummyBatchChannel) {
1468        delete mDummyBatchChannel;
1469        mDummyBatchChannel = NULL;
1470    }
1471
1472    //Create metadata channel and initialize it
1473    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1474                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1475                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1476    if (mMetadataChannel == NULL) {
1477        ALOGE("%s: failed to allocate metadata channel", __func__);
1478        rc = -ENOMEM;
1479        pthread_mutex_unlock(&mMutex);
1480        return rc;
1481    }
1482    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1483    if (rc < 0) {
1484        ALOGE("%s: metadata channel initialization failed", __func__);
1485        delete mMetadataChannel;
1486        mMetadataChannel = NULL;
1487        pthread_mutex_unlock(&mMutex);
1488        return rc;
1489    }
1490
1491    // Create analysis stream all the time, even when h/w support is not available
1492    {
1493        mAnalysisChannel = new QCamera3SupportChannel(
1494                mCameraHandle->camera_handle,
1495                mChannelHandle,
1496                mCameraHandle->ops,
1497                &gCamCapability[mCameraId]->padding_info,
1498                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1499                CAM_STREAM_TYPE_ANALYSIS,
1500                &gCamCapability[mCameraId]->analysis_recommended_res,
1501                gCamCapability[mCameraId]->analysis_recommended_format,
1502                this,
1503                0); // force buffer count to 0
1504        if (!mAnalysisChannel) {
1505            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1506            pthread_mutex_unlock(&mMutex);
1507            return -ENOMEM;
1508        }
1509    }
1510
1511    bool isRawStreamRequested = false;
1512    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1513    /* Allocate channel objects for the requested streams */
1514    for (size_t i = 0; i < streamList->num_streams; i++) {
1515        camera3_stream_t *newStream = streamList->streams[i];
1516        uint32_t stream_usage = newStream->usage;
1517        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1518        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1519        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1520                || IS_USAGE_ZSL(newStream->usage)) &&
1521            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1522            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1523            if (bUseCommonFeatureMask) {
1524                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1525                        commonFeatureMask;
1526            } else {
1527                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1528                        CAM_QCOM_FEATURE_NONE;
1529            }
1530
1531        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1532                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1533        } else {
1534            //for non zsl streams find out the format
1535            switch (newStream->format) {
1536            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1537              {
1538                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1539                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1540
1541                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1542
1543                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1544                     if (m_bTnrEnabled && m_bTnrVideo) {
1545                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1546                             CAM_QCOM_FEATURE_CPP_TNR;
1547                     }
1548
1549                 } else {
1550
1551                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1552                     if (m_bTnrEnabled && m_bTnrPreview) {
1553                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1554                             CAM_QCOM_FEATURE_CPP_TNR;
1555                     }
1556                 }
1557
1558                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1559                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1560                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1561                             newStream->height;
1562                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1563                             newStream->width;
1564                 }
1565              }
1566              break;
1567           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1568              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1569              if (isOnEncoder(maxViewfinderSize, newStream->width,
1570                      newStream->height)) {
1571                  if (bUseCommonFeatureMask)
1572                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1573                              commonFeatureMask;
1574                  else
1575                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1576                              CAM_QCOM_FEATURE_NONE;
1577              } else {
1578                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1579                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1580              }
1581              break;
1582           case HAL_PIXEL_FORMAT_BLOB:
1583              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1584              if (m_bIs4KVideo && !isZsl) {
1585                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1586                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1587              } else {
1588                  if (bUseCommonFeatureMask &&
1589                          isOnEncoder(maxViewfinderSize, newStream->width,
1590                                  newStream->height)) {
1591                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1592                  } else {
1593                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1594                  }
1595              }
1596              if (isZsl) {
1597                  if (zslStream) {
1598                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1599                              (int32_t)zslStream->width;
1600                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1601                              (int32_t)zslStream->height;
1602                  } else {
1603                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1604                      pthread_mutex_unlock(&mMutex);
1605                      return -EINVAL;
1606                  }
1607              } else if (m_bIs4KVideo) {
1608                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1609                          (int32_t)videoWidth;
1610                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1611                          (int32_t)videoHeight;
1612              } else if (bYuv888OverrideJpeg) {
1613                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1614                          (int32_t)largeYuv888Size.width;
1615                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1616                          (int32_t)largeYuv888Size.height;
1617              }
1618              break;
1619           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1620           case HAL_PIXEL_FORMAT_RAW16:
1621           case HAL_PIXEL_FORMAT_RAW10:
1622              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1623              isRawStreamRequested = true;
1624              break;
1625           default:
1626              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1627              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1628              break;
1629            }
1630
1631        }
1632
1633        if (newStream->priv == NULL) {
1634            //New stream, construct channel
1635            switch (newStream->stream_type) {
1636            case CAMERA3_STREAM_INPUT:
1637                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1638                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1639                break;
1640            case CAMERA3_STREAM_BIDIRECTIONAL:
1641                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1642                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1643                break;
1644            case CAMERA3_STREAM_OUTPUT:
1645                /* For video encoding stream, set read/write rarely
1646                 * flag so that they may be set to un-cached */
1647                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1648                    newStream->usage |=
1649                         (GRALLOC_USAGE_SW_READ_RARELY |
1650                         GRALLOC_USAGE_SW_WRITE_RARELY |
1651                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1652                else if (IS_USAGE_ZSL(newStream->usage))
1653                    CDBG("%s: ZSL usage flag skipping", __func__);
1654                else if (newStream == zslStream
1655                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1656                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1657                } else
1658                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1659                break;
1660            default:
1661                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1662                break;
1663            }
1664
1665            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1666                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1667                QCamera3ProcessingChannel *channel = NULL;
1668                switch (newStream->format) {
1669                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1670                    if ((newStream->usage &
1671                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1672                            (streamList->operation_mode ==
1673                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1674                    ) {
1675                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1676                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1677                                &gCamCapability[mCameraId]->padding_info,
1678                                this,
1679                                newStream,
1680                                (cam_stream_type_t)
1681                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1682                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1683                                mMetadataChannel,
1684                                0); //heap buffers are not required for HFR video channel
1685                        if (channel == NULL) {
1686                            ALOGE("%s: allocation of channel failed", __func__);
1687                            pthread_mutex_unlock(&mMutex);
1688                            return -ENOMEM;
1689                        }
1690                        //channel->getNumBuffers() will return 0 here so use
1691                        //MAX_INFLIGH_HFR_REQUESTS
1692                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1693                        newStream->priv = channel;
1694                        ALOGI("%s: num video buffers in HFR mode: %d",
1695                                __func__, MAX_INFLIGHT_HFR_REQUESTS);
1696                    } else {
1697                        /* Copy stream contents in HFR preview only case to create
1698                         * dummy batch channel so that sensor streaming is in
1699                         * HFR mode */
1700                        if (!m_bIsVideo && (streamList->operation_mode ==
1701                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1702                            mDummyBatchStream = *newStream;
1703                        }
1704                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1705                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1706                                &gCamCapability[mCameraId]->padding_info,
1707                                this,
1708                                newStream,
1709                                (cam_stream_type_t)
1710                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1711                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1712                                mMetadataChannel,
1713                                MAX_INFLIGHT_REQUESTS);
1714                        if (channel == NULL) {
1715                            ALOGE("%s: allocation of channel failed", __func__);
1716                            pthread_mutex_unlock(&mMutex);
1717                            return -ENOMEM;
1718                        }
1719                        newStream->max_buffers = channel->getNumBuffers();
1720                        newStream->priv = channel;
1721                    }
1722                    break;
1723                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1724                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1725                            mChannelHandle,
1726                            mCameraHandle->ops, captureResultCb,
1727                            &gCamCapability[mCameraId]->padding_info,
1728                            this,
1729                            newStream,
1730                            (cam_stream_type_t)
1731                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1732                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1733                            mMetadataChannel);
1734                    if (channel == NULL) {
1735                        ALOGE("%s: allocation of YUV channel failed", __func__);
1736                        pthread_mutex_unlock(&mMutex);
1737                        return -ENOMEM;
1738                    }
1739                    newStream->max_buffers = channel->getNumBuffers();
1740                    newStream->priv = channel;
1741                    break;
1742                }
1743                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1744                case HAL_PIXEL_FORMAT_RAW16:
1745                case HAL_PIXEL_FORMAT_RAW10:
1746                    mRawChannel = new QCamera3RawChannel(
1747                            mCameraHandle->camera_handle, mChannelHandle,
1748                            mCameraHandle->ops, captureResultCb,
1749                            &gCamCapability[mCameraId]->padding_info,
1750                            this, newStream, CAM_QCOM_FEATURE_NONE,
1751                            mMetadataChannel,
1752                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1753                    if (mRawChannel == NULL) {
1754                        ALOGE("%s: allocation of raw channel failed", __func__);
1755                        pthread_mutex_unlock(&mMutex);
1756                        return -ENOMEM;
1757                    }
1758                    newStream->max_buffers = mRawChannel->getNumBuffers();
1759                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1760                    break;
1761                case HAL_PIXEL_FORMAT_BLOB:
1762                    // Max live snapshot inflight buffer is 1. This is to mitigate
1763                    // frame drop issues for video snapshot. The more buffers being
1764                    // allocated, the more frame drops there are.
1765                    mPictureChannel = new QCamera3PicChannel(
1766                            mCameraHandle->camera_handle, mChannelHandle,
1767                            mCameraHandle->ops, captureResultCb,
1768                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1769                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1770                            m_bIs4KVideo, isZsl, mMetadataChannel,
1771                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1772                    if (mPictureChannel == NULL) {
1773                        ALOGE("%s: allocation of channel failed", __func__);
1774                        pthread_mutex_unlock(&mMutex);
1775                        return -ENOMEM;
1776                    }
1777                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1778                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1779                    mPictureChannel->overrideYuvSize(
1780                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1781                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1782                    break;
1783
1784                default:
1785                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1786                    break;
1787                }
1788            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1789                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1790            } else {
1791                ALOGE("%s: Error, Unknown stream type", __func__);
1792                return -EINVAL;
1793            }
1794
1795            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1796                    it != mStreamInfo.end(); it++) {
1797                if ((*it)->stream == newStream) {
1798                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1799                    break;
1800                }
1801            }
1802        } else {
1803            // Channel already exists for this stream
1804            // Do nothing for now
1805        }
1806
1807    /* Do not add entries for input stream in metastream info
1808         * since there is no real stream associated with it
1809         */
1810        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1811            mStreamConfigInfo.num_streams++;
1812    }
1813
1814    //RAW DUMP channel
1815    if (mEnableRawDump && isRawStreamRequested == false){
1816        cam_dimension_t rawDumpSize;
1817        rawDumpSize = getMaxRawSize(mCameraId);
1818        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1819                                  mChannelHandle,
1820                                  mCameraHandle->ops,
1821                                  rawDumpSize,
1822                                  &gCamCapability[mCameraId]->padding_info,
1823                                  this, CAM_QCOM_FEATURE_NONE);
1824        if (!mRawDumpChannel) {
1825            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1826            pthread_mutex_unlock(&mMutex);
1827            return -ENOMEM;
1828        }
1829    }
1830
1831
1832    if (mAnalysisChannel) {
1833        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1834                gCamCapability[mCameraId]->analysis_recommended_res;
1835        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1836                CAM_STREAM_TYPE_ANALYSIS;
1837        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1838                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1839        mStreamConfigInfo.num_streams++;
1840    }
1841
1842    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1843        mSupportChannel = new QCamera3SupportChannel(
1844                mCameraHandle->camera_handle,
1845                mChannelHandle,
1846                mCameraHandle->ops,
1847                &gCamCapability[mCameraId]->padding_info,
1848                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1849                CAM_STREAM_TYPE_CALLBACK,
1850                &QCamera3SupportChannel::kDim,
1851                CAM_FORMAT_YUV_420_NV21,
1852                this);
1853        if (!mSupportChannel) {
1854            ALOGE("%s: dummy channel cannot be created", __func__);
1855            pthread_mutex_unlock(&mMutex);
1856            return -ENOMEM;
1857        }
1858    }
1859
1860    if (mSupportChannel) {
1861        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1862                QCamera3SupportChannel::kDim;
1863        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1864                CAM_STREAM_TYPE_CALLBACK;
1865        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1866                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1867        mStreamConfigInfo.num_streams++;
1868    }
1869
1870    if (mRawDumpChannel) {
1871        cam_dimension_t rawSize;
1872        rawSize = getMaxRawSize(mCameraId);
1873        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1874                rawSize;
1875        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1876                CAM_STREAM_TYPE_RAW;
1877        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1878                CAM_QCOM_FEATURE_NONE;
1879        mStreamConfigInfo.num_streams++;
1880    }
1881    /* In HFR mode, if video stream is not added, create a dummy channel so that
1882     * ISP can create a batch mode even for preview only case. This channel is
1883     * never 'start'ed (no stream-on), it is only 'initialized'  */
1884    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1885            !m_bIsVideo) {
1886        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1887                mChannelHandle,
1888                mCameraHandle->ops, captureResultCb,
1889                &gCamCapability[mCameraId]->padding_info,
1890                this,
1891                &mDummyBatchStream,
1892                CAM_STREAM_TYPE_VIDEO,
1893                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1894                mMetadataChannel);
1895        if (NULL == mDummyBatchChannel) {
1896            ALOGE("%s: creation of mDummyBatchChannel failed."
1897                    "Preview will use non-hfr sensor mode ", __func__);
1898        }
1899    }
1900    if (mDummyBatchChannel) {
1901        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1902                mDummyBatchStream.width;
1903        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1904                mDummyBatchStream.height;
1905        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1906                CAM_STREAM_TYPE_VIDEO;
1907        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1908                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1909        mStreamConfigInfo.num_streams++;
1910    }
1911
1912    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1913    mStreamConfigInfo.buffer_info.max_buffers =
1914            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
1915
1916    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1917    for (pendingRequestIterator i = mPendingRequestsList.begin();
1918            i != mPendingRequestsList.end();) {
1919        i = erasePendingRequest(i);
1920    }
1921    mPendingFrameDropList.clear();
1922    // Initialize/Reset the pending buffers list
1923    mPendingBuffersMap.num_buffers = 0;
1924    mPendingBuffersMap.mPendingBufferList.clear();
1925    mPendingReprocessResultList.clear();
1926
1927    mFirstRequest = true;
1928    mCurJpegMeta.clear();
1929    //Get min frame duration for this streams configuration
1930    deriveMinFrameDuration();
1931
1932    /* Turn on video hint only if video stream is configured */
1933
1934    pthread_mutex_unlock(&mMutex);
1935
1936    return rc;
1937}
1938
1939/*===========================================================================
1940 * FUNCTION   : validateCaptureRequest
1941 *
1942 * DESCRIPTION: validate a capture request from camera service
1943 *
1944 * PARAMETERS :
1945 *   @request : request from framework to process
1946 *
1947 * RETURN     :
1948 *
1949 *==========================================================================*/
1950int QCamera3HardwareInterface::validateCaptureRequest(
1951                    camera3_capture_request_t *request)
1952{
1953    ssize_t idx = 0;
1954    const camera3_stream_buffer_t *b;
1955    CameraMetadata meta;
1956
1957    /* Sanity check the request */
1958    if (request == NULL) {
1959        ALOGE("%s: NULL capture request", __func__);
1960        return BAD_VALUE;
1961    }
1962
1963    if (request->settings == NULL && mFirstRequest) {
1964        /*settings cannot be null for the first request*/
1965        return BAD_VALUE;
1966    }
1967
1968    uint32_t frameNumber = request->frame_number;
1969    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1970        ALOGE("%s: Request %d: No output buffers provided!",
1971                __FUNCTION__, frameNumber);
1972        return BAD_VALUE;
1973    }
1974    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
1975        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
1976                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
1977        return BAD_VALUE;
1978    }
1979    if (request->input_buffer != NULL) {
1980        b = request->input_buffer;
1981        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1982            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1983                    __func__, frameNumber, (long)idx);
1984            return BAD_VALUE;
1985        }
1986        if (b->release_fence != -1) {
1987            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1988                    __func__, frameNumber, (long)idx);
1989            return BAD_VALUE;
1990        }
1991        if (b->buffer == NULL) {
1992            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1993                    __func__, frameNumber, (long)idx);
1994            return BAD_VALUE;
1995        }
1996    }
1997
1998    // Validate all buffers
1999    b = request->output_buffers;
2000    do {
2001        QCamera3ProcessingChannel *channel =
2002                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2003        if (channel == NULL) {
2004            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
2005                    __func__, frameNumber, (long)idx);
2006            return BAD_VALUE;
2007        }
2008        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2009            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2010                    __func__, frameNumber, (long)idx);
2011            return BAD_VALUE;
2012        }
2013        if (b->release_fence != -1) {
2014            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2015                    __func__, frameNumber, (long)idx);
2016            return BAD_VALUE;
2017        }
2018        if (b->buffer == NULL) {
2019            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2020                    __func__, frameNumber, (long)idx);
2021            return BAD_VALUE;
2022        }
2023        if (*(b->buffer) == NULL) {
2024            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2025                    __func__, frameNumber, (long)idx);
2026            return BAD_VALUE;
2027        }
2028        idx++;
2029        b = request->output_buffers + idx;
2030    } while (idx < (ssize_t)request->num_output_buffers);
2031
2032    return NO_ERROR;
2033}
2034
2035/*===========================================================================
2036 * FUNCTION   : deriveMinFrameDuration
2037 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2039 *              on currently configured streams.
2040 *
2041 * PARAMETERS : NONE
2042 *
2043 * RETURN     : NONE
2044 *
2045 *==========================================================================*/
2046void QCamera3HardwareInterface::deriveMinFrameDuration()
2047{
2048    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2049
2050    maxJpegDim = 0;
2051    maxProcessedDim = 0;
2052    maxRawDim = 0;
2053
2054    // Figure out maximum jpeg, processed, and raw dimensions
2055    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2056        it != mStreamInfo.end(); it++) {
2057
2058        // Input stream doesn't have valid stream_type
2059        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2060            continue;
2061
2062        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2063        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2064            if (dimension > maxJpegDim)
2065                maxJpegDim = dimension;
2066        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2067                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2068                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2069            if (dimension > maxRawDim)
2070                maxRawDim = dimension;
2071        } else {
2072            if (dimension > maxProcessedDim)
2073                maxProcessedDim = dimension;
2074        }
2075    }
2076
2077    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2078            MAX_SIZES_CNT);
2079
2080    //Assume all jpeg dimensions are in processed dimensions.
2081    if (maxJpegDim > maxProcessedDim)
2082        maxProcessedDim = maxJpegDim;
2083    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2084    if (maxProcessedDim > maxRawDim) {
2085        maxRawDim = INT32_MAX;
2086
2087        for (size_t i = 0; i < count; i++) {
2088            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2089                    gCamCapability[mCameraId]->raw_dim[i].height;
2090            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2091                maxRawDim = dimension;
2092        }
2093    }
2094
2095    //Find minimum durations for processed, jpeg, and raw
2096    for (size_t i = 0; i < count; i++) {
2097        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2098                gCamCapability[mCameraId]->raw_dim[i].height) {
2099            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2100            break;
2101        }
2102    }
2103    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2104    for (size_t i = 0; i < count; i++) {
2105        if (maxProcessedDim ==
2106                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2107                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2108            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2109            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2110            break;
2111        }
2112    }
2113}
2114
2115/*===========================================================================
2116 * FUNCTION   : getMinFrameDuration
2117 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2124 *
2125 *==========================================================================*/
2126int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2127{
2128    bool hasJpegStream = false;
2129    bool hasRawStream = false;
2130    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2131        const camera3_stream_t *stream = request->output_buffers[i].stream;
2132        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2133            hasJpegStream = true;
2134        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2135                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2136                stream->format == HAL_PIXEL_FORMAT_RAW16)
2137            hasRawStream = true;
2138    }
2139
2140    if (!hasJpegStream)
2141        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2142    else
2143        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2144}
2145
2146/*===========================================================================
2147 * FUNCTION   : handlePendingReprocResults
2148 *
2149 * DESCRIPTION: check and notify on any pending reprocess results
2150 *
2151 * PARAMETERS :
2152 *   @frame_number   : Pending request frame number
2153 *
2154 * RETURN     : int32_t type of status
2155 *              NO_ERROR  -- success
2156 *              none-zero failure code
2157 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the queued (delayed) reprocess results for one matching this frame.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the shutter notify that was held back for this frame
            // before sending the capture result.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its settings and input
            // buffer can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Assemble the single-buffer capture result from the
                    // stored reprocess output (j) and the pending request (k).
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully serviced; remove it from the pending list.
                    // Erasing then breaking keeps iteration safe.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Drop the delivered reprocess entry; the outer loop also breaks
            // here, so erasing 'j' mid-iteration is safe.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2196
2197/*===========================================================================
2198 * FUNCTION   : handleBatchMetadata
2199 *
2200 * DESCRIPTION: Handles metadata buffer callback in batch mode
2201 *
2202 * PARAMETERS : @metadata_buf: metadata buffer
2203 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2204 *                 the meta buf in this method
2205 *
2206 * RETURN     :
2207 *
2208 *==========================================================================*/
2209void QCamera3HardwareInterface::handleBatchMetadata(
2210        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2211{
2212    ATRACE_CALL();
2213
2214    if (NULL == metadata_buf) {
2215        ALOGE("%s: metadata_buf is NULL", __func__);
2216        return;
2217    }
2218    /* In batch mode, the metdata will contain the frame number and timestamp of
2219     * the last frame in the batch. Eg: a batch containing buffers from request
2220     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2221     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2222     * multiple process_capture_results */
2223    metadata_buffer_t *metadata =
2224            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2225    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2226    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2227    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2228    uint32_t frame_number = 0, urgent_frame_number = 0;
2229    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2230    bool invalid_metadata = false;
2231    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2232    size_t loopCount = 1;
2233
2234    int32_t *p_frame_number_valid =
2235            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2236    uint32_t *p_frame_number =
2237            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2238    int64_t *p_capture_time =
2239            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2240    int32_t *p_urgent_frame_number_valid =
2241            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2242    uint32_t *p_urgent_frame_number =
2243            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2244
2245    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2246            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2247            (NULL == p_urgent_frame_number)) {
2248        ALOGE("%s: Invalid metadata", __func__);
2249        invalid_metadata = true;
2250    } else {
2251        frame_number_valid = *p_frame_number_valid;
2252        last_frame_number = *p_frame_number;
2253        last_frame_capture_time = *p_capture_time;
2254        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2255        last_urgent_frame_number = *p_urgent_frame_number;
2256    }
2257
2258    /* In batchmode, when no video buffers are requested, set_parms are sent
2259     * for every capture_request. The difference between consecutive urgent
2260     * frame numbers and frame numbers should be used to interpolate the
2261     * corresponding frame numbers and time stamps */
2262    pthread_mutex_lock(&mMutex);
2263    if (urgent_frame_number_valid) {
2264        first_urgent_frame_number =
2265                mPendingBatchMap.valueFor(last_urgent_frame_number);
2266        urgentFrameNumDiff = last_urgent_frame_number + 1 -
2267                first_urgent_frame_number;
2268
2269        CDBG("%s: urgent_frm: valid: %d frm_num: %d - %d",
2270                __func__, urgent_frame_number_valid,
2271                first_urgent_frame_number, last_urgent_frame_number);
2272    }
2273
2274    if (frame_number_valid) {
2275        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2276        frameNumDiff = last_frame_number + 1 -
2277                first_frame_number;
2278        mPendingBatchMap.removeItem(last_frame_number);
2279
2280        CDBG("%s:        frm: valid: %d frm_num: %d - %d",
2281                __func__, frame_number_valid,
2282                first_frame_number, last_frame_number);
2283
2284    }
2285    pthread_mutex_unlock(&mMutex);
2286
2287    if (urgent_frame_number_valid || frame_number_valid) {
2288        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2289        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2290            ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2291                    __func__, urgentFrameNumDiff, last_urgent_frame_number);
2292        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2293            ALOGE("%s: frameNumDiff: %d frameNum: %d",
2294                    __func__, frameNumDiff, last_frame_number);
2295    }
2296
2297    for (size_t i = 0; i < loopCount; i++) {
2298        /* handleMetadataWithLock is called even for invalid_metadata for
2299         * pipeline depth calculation */
2300        if (!invalid_metadata) {
2301            /* Infer frame number. Batch metadata contains frame number of the
2302             * last frame */
2303            if (urgent_frame_number_valid) {
2304                if (i < urgentFrameNumDiff) {
2305                    urgent_frame_number =
2306                            first_urgent_frame_number + i;
2307                    CDBG("%s: inferred urgent frame_number: %d",
2308                            __func__, urgent_frame_number);
2309                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2310                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2311                } else {
2312                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2313                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2314                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2315                }
2316            }
2317
2318            /* Infer frame number. Batch metadata contains frame number of the
2319             * last frame */
2320            if (frame_number_valid) {
2321                if (i < frameNumDiff) {
2322                    frame_number = first_frame_number + i;
2323                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2324                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2325                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2326                } else {
2327                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2328                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2329                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2330                }
2331            }
2332
2333            if (last_frame_capture_time) {
2334                //Infer timestamp
2335                first_frame_capture_time = last_frame_capture_time -
2336                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2337                capture_time =
2338                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2339                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2340                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2341                CDBG("%s: batch capture_time: %lld, capture_time: %lld",
2342                        __func__, last_frame_capture_time, capture_time);
2343            }
2344        }
2345        pthread_mutex_lock(&mMutex);
2346        handleMetadataWithLock(metadata_buf,
2347                false /* free_and_bufdone_meta_buf */);
2348        pthread_mutex_unlock(&mMutex);
2349    }
2350
2351done_batch_metadata:
2352    /* BufDone metadata buffer */
2353    if (free_and_bufdone_meta_buf) {
2354        mMetadataChannel->bufDone(metadata_buf);
2355        free(metadata_buf);
2356    }
2357}
2358
2359/*===========================================================================
2360 * FUNCTION   : handleMetadataWithLock
2361 *
2362 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2363 *
2364 * PARAMETERS : @metadata_buf: metadata buffer
2365 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2366 *                 the meta buf in this method
2367 *
2368 * RETURN     :
2369 *
2370 *==========================================================================*/
2371void QCamera3HardwareInterface::handleMetadataWithLock(
2372    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2373{
2374    ATRACE_CALL();
2375
2376    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2377    int32_t frame_number_valid, urgent_frame_number_valid;
2378    uint32_t frame_number, urgent_frame_number;
2379    int64_t capture_time;
2380
2381    int32_t *p_frame_number_valid =
2382            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2383    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2384    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2385    int32_t *p_urgent_frame_number_valid =
2386            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2387    uint32_t *p_urgent_frame_number =
2388            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2389    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2390            metadata) {
2391        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
2392                __func__, *p_frame_number_valid, *p_frame_number);
2393    }
2394
2395    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2396            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2397        ALOGE("%s: Invalid metadata", __func__);
2398        if (free_and_bufdone_meta_buf) {
2399            mMetadataChannel->bufDone(metadata_buf);
2400            free(metadata_buf);
2401        }
2402        goto done_metadata;
2403    } else {
2404        frame_number_valid = *p_frame_number_valid;
2405        frame_number = *p_frame_number;
2406        capture_time = *p_capture_time;
2407        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2408        urgent_frame_number = *p_urgent_frame_number;
2409    }
2410    //Partial result on process_capture_result for timestamp
2411    if (urgent_frame_number_valid) {
2412        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
2413          __func__, urgent_frame_number, capture_time);
2414
2415        //Recieved an urgent Frame Number, handle it
2416        //using partial results
2417        for (pendingRequestIterator i =
2418                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
2419            CDBG("%s: Iterator Frame = %d urgent frame = %d",
2420                __func__, i->frame_number, urgent_frame_number);
2421
2422            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
2423                (i->partial_result_cnt == 0)) {
2424                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
2425                    __func__, i->frame_number);
2426            }
2427
2428            if (i->frame_number == urgent_frame_number &&
2429                     i->bUrgentReceived == 0) {
2430
2431                camera3_capture_result_t result;
2432                memset(&result, 0, sizeof(camera3_capture_result_t));
2433
2434                i->partial_result_cnt++;
2435                i->bUrgentReceived = 1;
2436                // Extract 3A metadata
2437                result.result =
2438                    translateCbUrgentMetadataToResultMetadata(metadata);
2439                // Populate metadata result
2440                result.frame_number = urgent_frame_number;
2441                result.num_output_buffers = 0;
2442                result.output_buffers = NULL;
2443                result.partial_result = i->partial_result_cnt;
2444
2445                mCallbackOps->process_capture_result(mCallbackOps, &result);
2446                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
2447                     __func__, result.frame_number, capture_time);
2448                free_camera_metadata((camera_metadata_t *)result.result);
2449                break;
2450            }
2451        }
2452    }
2453
2454    if (!frame_number_valid) {
2455        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
2456        if (free_and_bufdone_meta_buf) {
2457            mMetadataChannel->bufDone(metadata_buf);
2458            free(metadata_buf);
2459        }
2460        goto done_metadata;
2461    }
2462    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
2463            frame_number, capture_time);
2464
2465    for (pendingRequestIterator i = mPendingRequestsList.begin();
2466            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
2467        // Flush out all entries with less or equal frame numbers.
2468
2469        camera3_capture_result_t result;
2470        memset(&result, 0, sizeof(camera3_capture_result_t));
2471
2472        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
2473        i->partial_result_cnt++;
2474        result.partial_result = i->partial_result_cnt;
2475
2476        // Check whether any stream buffer corresponding to this is dropped or not
2477        // If dropped, then send the ERROR_BUFFER for the corresponding stream
2478        // The API does not expect a blob buffer to be dropped
2479        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
2480            /* Clear notify_msg structure */
2481            camera3_notify_msg_t notify_msg;
2482            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2483            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2484                    j != i->buffers.end(); j++) {
2485               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
2486                   QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
2487                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2488                   for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
2489                       if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
2490                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
2491                           ALOGW("%s: Start of reporting error frame#=%u, streamID=%u",
2492                                   __func__, i->frame_number, streamID);
2493                           notify_msg.type = CAMERA3_MSG_ERROR;
2494                           notify_msg.message.error.frame_number = i->frame_number;
2495                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
2496                           notify_msg.message.error.error_stream = j->stream;
2497                           mCallbackOps->notify(mCallbackOps, &notify_msg);
2498                           ALOGW("%s: End of reporting error frame#=%u, streamID=%u",
2499                                  __func__, i->frame_number, streamID);
2500                           PendingFrameDropInfo PendingFrameDrop;
2501                           PendingFrameDrop.frame_number=i->frame_number;
2502                           PendingFrameDrop.stream_ID = streamID;
2503                           // Add the Frame drop info to mPendingFrameDropList
2504                           mPendingFrameDropList.push_back(PendingFrameDrop);
2505                      }
2506                   }
2507               } else {
2508                   ALOGE("%s: JPEG buffer dropped for frame number %d",
2509                           __func__, i->frame_number);
2510               }
2511            }
2512        }
2513
2514        //TODO: batch handling for dropped metadata
2515
2516        // Send empty metadata with already filled buffers for dropped metadata
2517        // and send valid metadata with already filled buffers for current metadata
2518        /* we could hit this case when we either
2519         * 1. have a pending reprocess request or
2520         * 2. miss a metadata buffer callback */
2521        if (i->frame_number < frame_number) {
2522            /* Clear notify_msg structure */
2523            camera3_notify_msg_t notify_msg;
2524            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2525            notify_msg.type = CAMERA3_MSG_SHUTTER;
2526            notify_msg.message.shutter.frame_number = i->frame_number;
2527            notify_msg.message.shutter.timestamp = (uint64_t)capture_time -
2528                        (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
2529            if (i->input_buffer) {
2530                i->partial_result_cnt++; //input request will not have urgent metadata
2531                CameraMetadata settings;
2532                if(i->settings) {
2533                    settings = i->settings;
2534                    if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2535                        nsecs_t input_capture_time =
2536                                settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2537                        notify_msg.message.shutter.timestamp = (uint64_t)input_capture_time;
2538                    } else {
2539                        ALOGE("%s: No timestamp in input settings! Using current one.",
2540                                __func__);
2541                    }
2542                } else {
2543                    ALOGE("%s: Input settings missing!", __func__);
2544                }
2545                result.result = settings.release();
2546                result.partial_result = i->partial_result_cnt;
2547                CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
2548                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
2549            } else {
2550                mPendingLiveRequest--;
2551                CameraMetadata dummyMetadata;
2552                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
2553                        &i->timestamp, 1);
2554                dummyMetadata.update(ANDROID_REQUEST_ID,
2555                        &(i->request_id), 1);
2556                result.result = dummyMetadata.release();
2557            }
2558            mCallbackOps->notify(mCallbackOps, &notify_msg);
2559            i->timestamp = (nsecs_t)notify_msg.message.shutter.timestamp;
2560            CDBG("%s: Support notification !!!! notify frame_number = %u, capture_time = %llu",
2561                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
2562        } else {
2563            mPendingLiveRequest--;
2564            /* Clear notify_msg structure */
2565            camera3_notify_msg_t notify_msg;
2566            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2567
2568            // Send shutter notify to frameworks
2569            notify_msg.type = CAMERA3_MSG_SHUTTER;
2570            notify_msg.message.shutter.frame_number = i->frame_number;
2571            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2572            mCallbackOps->notify(mCallbackOps, &notify_msg);
2573
2574            i->timestamp = capture_time;
2575
2576            // Find channel requiring metadata, meaning internal offline postprocess
2577            // is needed.
2578            //TODO: for now, we don't support two streams requiring metadata at the same time.
2579            // (because we are not making copies, and metadata buffer is not reference counted.
2580            bool internalPproc = false;
2581            for (pendingBufferIterator iter = i->buffers.begin();
2582                    iter != i->buffers.end(); iter++) {
2583                if (iter->need_metadata) {
2584                    internalPproc = true;
2585                    QCamera3ProcessingChannel *channel =
2586                            (QCamera3ProcessingChannel *)iter->stream->priv;
2587                    channel->queueReprocMetadata(metadata_buf);
2588                    break;
2589                }
2590            }
2591
2592            result.result = translateFromHalMetadata(metadata,
2593                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
2594                    i->capture_intent, internalPproc);
2595
2596            saveExifParams(metadata);
2597
2598            if (i->blob_request) {
2599                {
2600                    //Dump tuning metadata if enabled and available
2601                    char prop[PROPERTY_VALUE_MAX];
2602                    memset(prop, 0, sizeof(prop));
2603                    property_get("persist.camera.dumpmetadata", prop, "0");
2604                    int32_t enabled = atoi(prop);
2605                    if (enabled && metadata->is_tuning_params_valid) {
2606                        dumpMetadataToFile(metadata->tuning_params,
2607                               mMetaFrameCount,
2608                               enabled,
2609                               "Snapshot",
2610                               frame_number);
2611                    }
2612                }
2613            }
2614
2615            if (!internalPproc) {
2616                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
2617                // Return metadata buffer
2618                if (free_and_bufdone_meta_buf) {
2619                    mMetadataChannel->bufDone(metadata_buf);
2620                    free(metadata_buf);
2621                }
2622            }
2623        }
2624        if (!result.result) {
2625            ALOGE("%s: metadata is NULL", __func__);
2626        }
2627        result.frame_number = i->frame_number;
2628        result.input_buffer = i->input_buffer;
2629        result.num_output_buffers = 0;
2630        result.output_buffers = NULL;
2631        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2632                    j != i->buffers.end(); j++) {
2633            if (j->buffer) {
2634                result.num_output_buffers++;
2635            }
2636        }
2637
2638        if (result.num_output_buffers > 0) {
2639            camera3_stream_buffer_t *result_buffers =
2640                new camera3_stream_buffer_t[result.num_output_buffers];
2641            if (!result_buffers) {
2642                ALOGE("%s: Fatal error: out of memory", __func__);
2643            }
2644            size_t result_buffers_idx = 0;
2645            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2646                    j != i->buffers.end(); j++) {
2647                if (j->buffer) {
2648                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2649                            m != mPendingFrameDropList.end(); m++) {
2650                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
2651                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2652                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
2653                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2654                            ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
2655                                  __func__, frame_number, streamID);
2656                            m = mPendingFrameDropList.erase(m);
2657                            break;
2658                        }
2659                    }
2660
2661                    for (List<PendingBufferInfo>::iterator k =
2662                      mPendingBuffersMap.mPendingBufferList.begin();
2663                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
2664                      if (k->buffer == j->buffer->buffer) {
2665                        CDBG("%s: Found buffer %p in pending buffer List "
2666                              "for frame %u, Take it out!!", __func__,
2667                               k->buffer, k->frame_number);
2668                        mPendingBuffersMap.num_buffers--;
2669                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
2670                        break;
2671                      }
2672                    }
2673
2674                    result_buffers[result_buffers_idx++] = *(j->buffer);
2675                    free(j->buffer);
2676                    j->buffer = NULL;
2677                }
2678            }
2679            result.output_buffers = result_buffers;
2680            mCallbackOps->process_capture_result(mCallbackOps, &result);
2681            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
2682                    __func__, __LINE__, result.frame_number, i->timestamp);
2683            free_camera_metadata((camera_metadata_t *)result.result);
2684            delete[] result_buffers;
2685        } else {
2686            mCallbackOps->process_capture_result(mCallbackOps, &result);
2687            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
2688                        __func__, __LINE__, result.frame_number, i->timestamp);
2689            free_camera_metadata((camera_metadata_t *)result.result);
2690        }
2691        // erase the element from the list
2692        i = erasePendingRequest(i);
2693
2694        if (!mPendingReprocessResultList.empty()) {
2695            handlePendingReprocResults(frame_number + 1);
2696        }
2697    }
2698
2699done_metadata:
2700    for (pendingRequestIterator i = mPendingRequestsList.begin();
2701            i != mPendingRequestsList.end() ;i++) {
2702        i->pipeline_depth++;
2703    }
2704    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
2705    unblockRequestIfNecessary();
2706
2707}
2708
2709/*===========================================================================
2710 * FUNCTION   : hdrPlusPerfLock
2711 *
2712 * DESCRIPTION: perf lock for HDR+ using custom intent
2713 *
2714 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2715 *
2716 * RETURN     : None
2717 *
2718 *==========================================================================*/
2719void QCamera3HardwareInterface::hdrPlusPerfLock(
2720        mm_camera_super_buf_t *metadata_buf)
2721{
2722    if (NULL == metadata_buf) {
2723        ALOGE("%s: metadata_buf is NULL", __func__);
2724        return;
2725    }
2726    metadata_buffer_t *metadata =
2727            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2728    int32_t *p_frame_number_valid =
2729            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2730    uint32_t *p_frame_number =
2731            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2732
2733    //acquire perf lock for 5 sec after the last HDR frame is captured
2734    if (*p_frame_number_valid) {
2735        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2736            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2737        }
2738    }
2739
2740    //release lock after perf lock timer is expired. If lock is already released,
2741    //isTimerReset returns false
2742    if (m_perfLock.isTimerReset()) {
2743        mLastCustIntentFrmNum = -1;
2744        m_perfLock.lock_rel_timed();
2745    }
2746}
2747/*===========================================================================
2748 * FUNCTION   : handleBufferWithLock
2749 *
2750 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2751 *
2752 * PARAMETERS : @buffer: image buffer for the callback
2753 *              @frame_number: frame number of the image buffer
2754 *
2755 * RETURN     :
2756 *
2757 *==========================================================================*/
2758void QCamera3HardwareInterface::handleBufferWithLock(
2759    camera3_stream_buffer_t *buffer, uint32_t frame_number)
2760{
2761    ATRACE_CALL();
2762    // If the frame number doesn't exist in the pending request list,
2763    // directly send the buffer to the frameworks, and update pending buffers map
2764    // Otherwise, book-keep the buffer.
2765    pendingRequestIterator i = mPendingRequestsList.begin();
2766    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
2767        i++;
2768    }
2769    if (i == mPendingRequestsList.end()) {
2770        // Verify all pending requests frame_numbers are greater
2771        for (pendingRequestIterator j = mPendingRequestsList.begin();
2772                j != mPendingRequestsList.end(); j++) {
2773            if (j->frame_number < frame_number) {
2774                ALOGE("%s: Error: pending frame number %d is smaller than %d",
2775                        __func__, j->frame_number, frame_number);
2776            }
2777        }
2778        camera3_capture_result_t result;
2779        memset(&result, 0, sizeof(camera3_capture_result_t));
2780        result.result = NULL;
2781        result.frame_number = frame_number;
2782        result.num_output_buffers = 1;
2783        result.partial_result = 0;
2784        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
2785                m != mPendingFrameDropList.end(); m++) {
2786            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
2787            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
2788            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
2789                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
2790                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
2791                        __func__, frame_number, streamID);
2792                m = mPendingFrameDropList.erase(m);
2793                break;
2794            }
2795        }
2796        result.output_buffers = buffer;
2797        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
2798                __func__, frame_number, buffer->buffer);
2799
2800        for (List<PendingBufferInfo>::iterator k =
2801                mPendingBuffersMap.mPendingBufferList.begin();
2802                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2803            if (k->buffer == buffer->buffer) {
2804                CDBG("%s: Found Frame buffer, take it out from list",
2805                        __func__);
2806
2807                mPendingBuffersMap.num_buffers--;
2808                k = mPendingBuffersMap.mPendingBufferList.erase(k);
2809                break;
2810            }
2811        }
2812        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2813            __func__, mPendingBuffersMap.num_buffers);
2814
2815        mCallbackOps->process_capture_result(mCallbackOps, &result);
2816    } else {
2817        if (i->input_buffer) {
2818            CameraMetadata settings;
2819            camera3_notify_msg_t notify_msg;
2820            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2821            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
2822            if(i->settings) {
2823                settings = i->settings;
2824                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
2825                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
2826                } else {
2827                    ALOGE("%s: No timestamp in input settings! Using current one.",
2828                            __func__);
2829                }
2830            } else {
2831                ALOGE("%s: Input settings missing!", __func__);
2832            }
2833
2834            notify_msg.type = CAMERA3_MSG_SHUTTER;
2835            notify_msg.message.shutter.frame_number = frame_number;
2836            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
2837
2838            if (i->input_buffer->release_fence != -1) {
2839               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
2840               close(i->input_buffer->release_fence);
2841               if (rc != OK) {
2842               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
2843               }
2844            }
2845
2846            for (List<PendingBufferInfo>::iterator k =
2847                    mPendingBuffersMap.mPendingBufferList.begin();
2848                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
2849                if (k->buffer == buffer->buffer) {
2850                    CDBG("%s: Found Frame buffer, take it out from list",
2851                            __func__);
2852
2853                    mPendingBuffersMap.num_buffers--;
2854                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
2855                    break;
2856                }
2857            }
2858            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
2859                __func__, mPendingBuffersMap.num_buffers);
2860
2861            bool notifyNow = true;
2862            for (pendingRequestIterator j = mPendingRequestsList.begin();
2863                    j != mPendingRequestsList.end(); j++) {
2864                if (j->frame_number < frame_number) {
2865                    notifyNow = false;
2866                    break;
2867                }
2868            }
2869
2870            if (notifyNow) {
2871                camera3_capture_result result;
2872                memset(&result, 0, sizeof(camera3_capture_result));
2873                result.frame_number = frame_number;
2874                result.result = i->settings;
2875                result.input_buffer = i->input_buffer;
2876                result.num_output_buffers = 1;
2877                result.output_buffers = buffer;
2878                result.partial_result = PARTIAL_RESULT_COUNT;
2879
2880                mCallbackOps->notify(mCallbackOps, &notify_msg);
2881                mCallbackOps->process_capture_result(mCallbackOps, &result);
2882                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
2883                i = erasePendingRequest(i);
2884            } else {
2885                // Cache reprocess result for later
2886                PendingReprocessResult pendingResult;
2887                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
2888                pendingResult.notify_msg = notify_msg;
2889                pendingResult.buffer = *buffer;
2890                pendingResult.frame_number = frame_number;
2891                mPendingReprocessResultList.push_back(pendingResult);
2892                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
2893            }
2894        } else {
2895            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
2896                j != i->buffers.end(); j++) {
2897                if (j->stream == buffer->stream) {
2898                    if (j->buffer != NULL) {
2899                        ALOGE("%s: Error: buffer is already set", __func__);
2900                    } else {
2901                        j->buffer = (camera3_stream_buffer_t *)malloc(
2902                            sizeof(camera3_stream_buffer_t));
2903                        *(j->buffer) = *buffer;
2904                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
2905                            __func__, buffer, frame_number);
2906                    }
2907                }
2908            }
2909        }
2910    }
2911}
2912
2913/*===========================================================================
2914 * FUNCTION   : unblockRequestIfNecessary
2915 *
2916 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2917 *              that mMutex is held when this function is called.
2918 *
2919 * PARAMETERS :
2920 *
2921 * RETURN     :
2922 *
2923 *==========================================================================*/
2924void QCamera3HardwareInterface::unblockRequestIfNecessary()
2925{
2926   // Unblock process_capture_request
2927   pthread_cond_signal(&mRequestCond);
2928}
2929
2930
2931/*===========================================================================
2932 * FUNCTION   : processCaptureRequest
2933 *
2934 * DESCRIPTION: process a capture request from camera service
2935 *
2936 * PARAMETERS :
2937 *   @request : request from framework to process
2938 *
2939 * RETURN     :
2940 *
2941 *==========================================================================*/
2942int QCamera3HardwareInterface::processCaptureRequest(
2943                    camera3_capture_request_t *request)
2944{
2945    ATRACE_CALL();
2946    int rc = NO_ERROR;
2947    int32_t request_id;
2948    CameraMetadata meta;
2949    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
2950    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
2951    bool isVidBufRequested = false;
2952    camera3_stream_buffer_t *pInputBuffer = NULL;
2953
2954    pthread_mutex_lock(&mMutex);
2955
2956    rc = validateCaptureRequest(request);
2957    if (rc != NO_ERROR) {
2958        ALOGE("%s: incoming request is not valid", __func__);
2959        pthread_mutex_unlock(&mMutex);
2960        return rc;
2961    }
2962
2963    meta = request->settings;
2964
2965    // For first capture request, send capture intent, and
2966    // stream on all streams
2967    if (mFirstRequest) {
2968        // send an unconfigure to the backend so that the isp
2969        // resources are deallocated
2970        if (!mFirstConfiguration) {
2971            cam_stream_size_info_t stream_config_info;
2972            int32_t hal_version = CAM_HAL_V3;
2973            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
2974            stream_config_info.buffer_info.min_buffers =
2975                    MIN_INFLIGHT_REQUESTS;
2976            stream_config_info.buffer_info.max_buffers =
2977                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2978            clear_metadata_buffer(mParameters);
2979            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2980                    CAM_INTF_PARM_HAL_VERSION, hal_version);
2981            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2982                    CAM_INTF_META_STREAM_INFO, stream_config_info);
2983            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2984                    mParameters);
2985            if (rc < 0) {
2986                ALOGE("%s: set_parms for unconfigure failed", __func__);
2987                pthread_mutex_unlock(&mMutex);
2988                return rc;
2989            }
2990        }
2991        m_perfLock.lock_acq();
2992        /* get eis information for stream configuration */
2993        cam_is_type_t is_type;
2994        char is_type_value[PROPERTY_VALUE_MAX];
2995        property_get("persist.camera.is_type", is_type_value, "0");
2996        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2997
2998        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2999            int32_t hal_version = CAM_HAL_V3;
3000            uint8_t captureIntent =
3001                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3002            mCaptureIntent = captureIntent;
3003            clear_metadata_buffer(mParameters);
3004            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3005            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3006        }
3007
3008        //If EIS is enabled, turn it on for video
3009        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3010        int32_t vsMode;
3011        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3012        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3013            rc = BAD_VALUE;
3014        }
3015
3016        //IS type will be 0 unless EIS is supported. If EIS is supported
3017        //it could either be 1 or 4 depending on the stream and video size
3018        if (setEis) {
3019            if (!m_bEisSupportedSize) {
3020                is_type = IS_TYPE_DIS;
3021            } else {
3022                is_type = IS_TYPE_EIS_2_0;
3023            }
3024            mStreamConfigInfo.is_type = is_type;
3025        } else {
3026            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3027        }
3028
3029        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3030                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3031        int32_t tintless_value = 1;
3032        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3033                CAM_INTF_PARM_TINTLESS, tintless_value);
3034        //Disable CDS for HFR mode and if mPprocBypass = true.
3035        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3036        //after every configure_stream
3037        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3038                (m_bIsVideo)) {
3039            int32_t cds = CAM_CDS_MODE_OFF;
3040            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3041                    CAM_INTF_PARM_CDS_MODE, cds))
3042                ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3043
3044        }
3045        setMobicat();
3046
3047        /* Set fps and hfr mode while sending meta stream info so that sensor
3048         * can configure appropriate streaming mode */
3049        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3050        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3051            rc = setHalFpsRange(meta, mParameters);
3052            if (rc != NO_ERROR) {
3053                ALOGE("%s: setHalFpsRange failed", __func__);
3054            }
3055        }
3056        if (meta.exists(ANDROID_CONTROL_MODE)) {
3057            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3058            rc = extractSceneMode(meta, metaMode, mParameters);
3059            if (rc != NO_ERROR) {
3060                ALOGE("%s: extractSceneMode failed", __func__);
3061            }
3062        }
3063
3064        //TODO: validate the arguments, HSV scenemode should have only the
3065        //advertised fps ranges
3066
3067        /*set the capture intent, hal version, tintless, stream info,
3068         *and disenable parameters to the backend*/
3069        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3070        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3071                    mParameters);
3072
3073        cam_dimension_t sensor_dim;
3074        memset(&sensor_dim, 0, sizeof(sensor_dim));
3075        rc = getSensorOutputSize(sensor_dim);
3076        if (rc != NO_ERROR) {
3077            ALOGE("%s: Failed to get sensor output size", __func__);
3078            pthread_mutex_unlock(&mMutex);
3079            goto error_exit;
3080        }
3081
3082        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3083                gCamCapability[mCameraId]->active_array_size.height,
3084                sensor_dim.width, sensor_dim.height);
3085
3086        /* Set batchmode before initializing channel. Since registerBuffer
3087         * internally initializes some of the channels, better set batchmode
3088         * even before first register buffer */
3089        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3090            it != mStreamInfo.end(); it++) {
3091            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3092            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3093                    && mBatchSize) {
3094                rc = channel->setBatchSize(mBatchSize);
3095                //Disable per frame map unmap for HFR/batchmode case
3096                rc |= channel->setPerFrameMapUnmap(false);
3097                if (NO_ERROR != rc) {
3098                    ALOGE("%s : Channel init failed %d", __func__, rc);
3099                    pthread_mutex_unlock(&mMutex);
3100                    goto error_exit;
3101                }
3102            }
3103        }
3104
3105        //First initialize all streams
3106        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3107            it != mStreamInfo.end(); it++) {
3108            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3109            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3110               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3111               setEis)
3112                rc = channel->initialize(is_type);
3113            else {
3114                rc = channel->initialize(IS_TYPE_NONE);
3115            }
3116            if (NO_ERROR != rc) {
3117                ALOGE("%s : Channel initialization failed %d", __func__, rc);
3118                pthread_mutex_unlock(&mMutex);
3119                goto error_exit;
3120            }
3121        }
3122
3123        if (mRawDumpChannel) {
3124            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3125            if (rc != NO_ERROR) {
3126                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3127                pthread_mutex_unlock(&mMutex);
3128                goto error_exit;
3129            }
3130        }
3131        if (mSupportChannel) {
3132            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3133            if (rc < 0) {
3134                ALOGE("%s: Support channel initialization failed", __func__);
3135                pthread_mutex_unlock(&mMutex);
3136                goto error_exit;
3137            }
3138        }
3139        if (mAnalysisChannel) {
3140            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3141            if (rc < 0) {
3142                ALOGE("%s: Analysis channel initialization failed", __func__);
3143                pthread_mutex_unlock(&mMutex);
3144                goto error_exit;
3145            }
3146        }
3147        if (mDummyBatchChannel) {
3148            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3149            if (rc < 0) {
3150                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3151                pthread_mutex_unlock(&mMutex);
3152                goto error_exit;
3153            }
3154            rc = mDummyBatchChannel->initialize(is_type);
3155            if (rc < 0) {
3156                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3157                pthread_mutex_unlock(&mMutex);
3158                goto error_exit;
3159            }
3160        }
3161
3162        // Set bundle info
3163        rc = setBundleInfo();
3164        if (rc < 0) {
3165            ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3166            pthread_mutex_unlock(&mMutex);
3167            goto error_exit;
3168        }
3169
3170        //Then start them.
3171        CDBG_HIGH("%s: Start META Channel", __func__);
3172        rc = mMetadataChannel->start();
3173        if (rc < 0) {
3174            ALOGE("%s: META channel start failed", __func__);
3175            pthread_mutex_unlock(&mMutex);
3176            goto error_exit;
3177        }
3178
3179        if (mAnalysisChannel) {
3180            rc = mAnalysisChannel->start();
3181            if (rc < 0) {
3182                ALOGE("%s: Analysis channel start failed", __func__);
3183                mMetadataChannel->stop();
3184                pthread_mutex_unlock(&mMutex);
3185                goto error_exit;
3186            }
3187        }
3188
3189        if (mSupportChannel) {
3190            rc = mSupportChannel->start();
3191            if (rc < 0) {
3192                ALOGE("%s: Support channel start failed", __func__);
3193                mMetadataChannel->stop();
3194                /* Although support and analysis are mutually exclusive today
3195                   adding it in anycase for future proofing */
3196                if (mAnalysisChannel) {
3197                    mAnalysisChannel->stop();
3198                }
3199                pthread_mutex_unlock(&mMutex);
3200                goto error_exit;
3201            }
3202        }
3203        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3204            it != mStreamInfo.end(); it++) {
3205            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3206            CDBG_HIGH("%s: Start Processing Channel mask=%d",
3207                    __func__, channel->getStreamTypeMask());
3208            rc = channel->start();
3209            if (rc < 0) {
3210                ALOGE("%s: channel start failed", __func__);
3211                pthread_mutex_unlock(&mMutex);
3212                goto error_exit;
3213            }
3214        }
3215
3216        if (mRawDumpChannel) {
3217            CDBG("%s: Starting raw dump stream",__func__);
3218            rc = mRawDumpChannel->start();
3219            if (rc != NO_ERROR) {
3220                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3221                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3222                      it != mStreamInfo.end(); it++) {
3223                    QCamera3Channel *channel =
3224                        (QCamera3Channel *)(*it)->stream->priv;
3225                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3226                        channel->getStreamTypeMask());
3227                    channel->stop();
3228                }
3229                if (mSupportChannel)
3230                    mSupportChannel->stop();
3231                if (mAnalysisChannel) {
3232                    mAnalysisChannel->stop();
3233                }
3234                mMetadataChannel->stop();
3235                pthread_mutex_unlock(&mMutex);
3236                goto error_exit;
3237            }
3238        }
3239
3240        if (mChannelHandle) {
3241
3242            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3243                    mChannelHandle);
3244            if (rc != NO_ERROR) {
3245                ALOGE("%s: start_channel failed %d", __func__, rc);
3246                pthread_mutex_unlock(&mMutex);
3247                goto error_exit;
3248            }
3249        }
3250
3251
3252        goto no_error;
3253error_exit:
3254        m_perfLock.lock_rel();
3255        return rc;
3256no_error:
3257        m_perfLock.lock_rel();
3258
3259        mWokenUpByDaemon = false;
3260        mPendingLiveRequest = 0;
3261        mFirstConfiguration = false;
3262        enablePowerHint();
3263    }
3264
3265    uint32_t frameNumber = request->frame_number;
3266    cam_stream_ID_t streamID;
3267
3268    if (meta.exists(ANDROID_REQUEST_ID)) {
3269        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3270        mCurrentRequestId = request_id;
3271        CDBG("%s: Received request with id: %d",__func__, request_id);
3272    } else if (mFirstRequest || mCurrentRequestId == -1){
3273        ALOGE("%s: Unable to find request id field, \
3274                & no previous id available", __func__);
3275        pthread_mutex_unlock(&mMutex);
3276        return NAME_NOT_FOUND;
3277    } else {
3278        CDBG("%s: Re-using old request id", __func__);
3279        request_id = mCurrentRequestId;
3280    }
3281
3282    CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3283                                    __func__, __LINE__,
3284                                    request->num_output_buffers,
3285                                    request->input_buffer,
3286                                    frameNumber);
3287    // Acquire all request buffers first
3288    streamID.num_streams = 0;
3289    int blob_request = 0;
3290    uint32_t snapshotStreamId = 0;
3291    for (size_t i = 0; i < request->num_output_buffers; i++) {
3292        const camera3_stream_buffer_t& output = request->output_buffers[i];
3293        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3294
3295        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3296            //Call function to store local copy of jpeg data for encode params.
3297            blob_request = 1;
3298            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3299        }
3300
3301        if (output.acquire_fence != -1) {
3302           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3303           close(output.acquire_fence);
3304           if (rc != OK) {
3305              ALOGE("%s: sync wait failed %d", __func__, rc);
3306              pthread_mutex_unlock(&mMutex);
3307              return rc;
3308           }
3309        }
3310
3311        streamID.streamID[streamID.num_streams] =
3312            channel->getStreamID(channel->getStreamTypeMask());
3313        streamID.num_streams++;
3314
3315        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3316            isVidBufRequested = true;
3317        }
3318    }
3319
3320    if (blob_request && mRawDumpChannel) {
3321        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3322        streamID.streamID[streamID.num_streams] =
3323            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3324        streamID.num_streams++;
3325    }
3326
3327    if(request->input_buffer == NULL) {
3328        /* Parse the settings:
3329         * - For every request in NORMAL MODE
3330         * - For every request in HFR mode during preview only case
3331         * - For first request of every batch in HFR mode during video
3332         * recording. In batchmode the same settings except frame number is
3333         * repeated in each request of the batch.
3334         */
3335        if (!mBatchSize ||
3336           (mBatchSize && !isVidBufRequested) ||
3337           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3338            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3339            if (rc < 0) {
3340                ALOGE("%s: fail to set frame parameters", __func__);
3341                pthread_mutex_unlock(&mMutex);
3342                return rc;
3343            }
3344        }
3345        /* For batchMode HFR, setFrameParameters is not called for every
3346         * request. But only frame number of the latest request is parsed.
3347         * Keep track of first and last frame numbers in a batch so that
3348         * metadata for the frame numbers of batch can be duplicated in
3349         * handleBatchMetadta */
3350        if (mBatchSize) {
3351            if (!mToBeQueuedVidBufs) {
3352                //start of the batch
3353                mFirstFrameNumberInBatch = request->frame_number;
3354            }
3355            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3356                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3357                ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3358                return BAD_VALUE;
3359            }
3360        }
3361        if (mNeedSensorRestart) {
3362            /* Unlock the mutex as restartSensor waits on the channels to be
3363             * stopped, which in turn calls stream callback functions -
3364             * handleBufferWithLock and handleMetadataWithLock */
3365            pthread_mutex_unlock(&mMutex);
3366            rc = dynamicUpdateMetaStreamInfo();
3367            if (rc != NO_ERROR) {
3368                ALOGE("%s: Restarting the sensor failed", __func__);
3369                return BAD_VALUE;
3370            }
3371            mNeedSensorRestart = false;
3372            pthread_mutex_lock(&mMutex);
3373        }
3374    } else {
3375
3376        if (request->input_buffer->acquire_fence != -1) {
3377           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3378           close(request->input_buffer->acquire_fence);
3379           if (rc != OK) {
3380              ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3381              pthread_mutex_unlock(&mMutex);
3382              return rc;
3383           }
3384        }
3385    }
3386
3387    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3388        mLastCustIntentFrmNum = frameNumber;
3389    }
3390    /* Update pending request list and pending buffers map */
3391    PendingRequestInfo pendingRequest;
3392    pendingRequestIterator latestRequest;
3393    pendingRequest.frame_number = frameNumber;
3394    pendingRequest.num_buffers = request->num_output_buffers;
3395    pendingRequest.request_id = request_id;
3396    pendingRequest.blob_request = blob_request;
3397    pendingRequest.timestamp = 0;
3398    pendingRequest.bUrgentReceived = 0;
3399    if (request->input_buffer) {
3400        pendingRequest.input_buffer =
3401                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3402        *(pendingRequest.input_buffer) = *(request->input_buffer);
3403        pInputBuffer = pendingRequest.input_buffer;
3404    } else {
3405       pendingRequest.input_buffer = NULL;
3406       pInputBuffer = NULL;
3407    }
3408
3409    pendingRequest.pipeline_depth = 0;
3410    pendingRequest.partial_result_cnt = 0;
3411    extractJpegMetadata(mCurJpegMeta, request);
3412    pendingRequest.jpegMetadata = mCurJpegMeta;
3413    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3414
3415    //extract capture intent
3416    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3417        mCaptureIntent =
3418                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3419    }
3420    pendingRequest.capture_intent = mCaptureIntent;
3421
3422    for (size_t i = 0; i < request->num_output_buffers; i++) {
3423        RequestedBufferInfo requestedBuf;
3424        memset(&requestedBuf, 0, sizeof(requestedBuf));
3425        requestedBuf.stream = request->output_buffers[i].stream;
3426        requestedBuf.buffer = NULL;
3427        pendingRequest.buffers.push_back(requestedBuf);
3428
3429        // Add to buffer handle the pending buffers list
3430        PendingBufferInfo bufferInfo;
3431        bufferInfo.frame_number = frameNumber;
3432        bufferInfo.buffer = request->output_buffers[i].buffer;
3433        bufferInfo.stream = request->output_buffers[i].stream;
3434        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3435        mPendingBuffersMap.num_buffers++;
3436        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3437        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3438                __func__, frameNumber, bufferInfo.buffer,
3439                channel->getStreamTypeMask(), bufferInfo.stream->format);
3440    }
3441    latestRequest = mPendingRequestsList.insert(
3442            mPendingRequestsList.end(), pendingRequest);
3443    if(mFlush) {
3444        pthread_mutex_unlock(&mMutex);
3445        return NO_ERROR;
3446    }
3447
3448    // Notify metadata channel we receive a request
3449    mMetadataChannel->request(NULL, frameNumber);
3450
3451    if(request->input_buffer != NULL){
3452        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3453        if (NO_ERROR != rc) {
3454            ALOGE("%s: fail to set reproc parameters", __func__);
3455            pthread_mutex_unlock(&mMutex);
3456            return rc;
3457        }
3458    }
3459
3460    // Call request on other streams
3461    uint32_t streams_need_metadata = 0;
3462    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3463    for (size_t i = 0; i < request->num_output_buffers; i++) {
3464        const camera3_stream_buffer_t& output = request->output_buffers[i];
3465        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3466
3467        if (channel == NULL) {
3468            ALOGE("%s: invalid channel pointer for stream", __func__);
3469            continue;
3470        }
3471
3472        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3473            if(request->input_buffer != NULL){
3474                rc = channel->request(output.buffer, frameNumber,
3475                        pInputBuffer, &mReprocMeta);
3476                if (rc < 0) {
3477                    ALOGE("%s: Fail to request on picture channel", __func__);
3478                    pthread_mutex_unlock(&mMutex);
3479                    return rc;
3480                }
3481            } else {
3482                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3483                        __LINE__, output.buffer, frameNumber);
3484                if (!request->settings) {
3485                    rc = channel->request(output.buffer, frameNumber,
3486                            NULL, mPrevParameters);
3487                } else {
3488                    rc = channel->request(output.buffer, frameNumber,
3489                            NULL, mParameters);
3490                }
3491                if (rc < 0) {
3492                    ALOGE("%s: Fail to request on picture channel", __func__);
3493                    pthread_mutex_unlock(&mMutex);
3494                    return rc;
3495                }
3496                pendingBufferIter->need_metadata = true;
3497                streams_need_metadata++;
3498            }
3499        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3500            bool needMetadata = false;
3501            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3502            rc = yuvChannel->request(output.buffer, frameNumber,
3503                    pInputBuffer,
3504                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3505            if (rc < 0) {
3506                ALOGE("%s: Fail to request on YUV channel", __func__);
3507                pthread_mutex_unlock(&mMutex);
3508                return rc;
3509            }
3510            pendingBufferIter->need_metadata = needMetadata;
3511            if (needMetadata)
3512                streams_need_metadata += 1;
3513            CDBG("%s: calling YUV channel request, need_metadata is %d",
3514                    __func__, needMetadata);
3515        } else {
3516            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3517                __LINE__, output.buffer, frameNumber);
3518            rc = channel->request(output.buffer, frameNumber);
3519            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3520                    && mBatchSize) {
3521                mToBeQueuedVidBufs++;
3522                if (mToBeQueuedVidBufs == mBatchSize) {
3523                    channel->queueBatchBuf();
3524                }
3525            }
3526            if (rc < 0) {
3527                ALOGE("%s: request failed", __func__);
3528                pthread_mutex_unlock(&mMutex);
3529                return rc;
3530            }
3531        }
3532        pendingBufferIter++;
3533    }
3534
3535    //If 2 streams have need_metadata set to true, fail the request, unless
3536    //we copy/reference count the metadata buffer
3537    if (streams_need_metadata > 1) {
3538        ALOGE("%s: not supporting request in which two streams requires"
3539                " 2 HAL metadata for reprocessing", __func__);
3540        pthread_mutex_unlock(&mMutex);
3541        return -EINVAL;
3542    }
3543
3544    if(request->input_buffer == NULL) {
3545        /* Set the parameters to backend:
3546         * - For every request in NORMAL MODE
3547         * - For every request in HFR mode during preview only case
3548         * - Once every batch in HFR mode during video recording
3549         */
3550        if (!mBatchSize ||
3551           (mBatchSize && !isVidBufRequested) ||
3552           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3553            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3554                    __func__, mBatchSize, isVidBufRequested,
3555                    mToBeQueuedVidBufs);
3556            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3557                    mParameters);
3558            if (rc < 0) {
3559                ALOGE("%s: set_parms failed", __func__);
3560            }
3561            /* reset to zero coz, the batch is queued */
3562            mToBeQueuedVidBufs = 0;
3563            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3564        }
3565        mPendingLiveRequest++;
3566    }
3567
3568    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3569
3570    mFirstRequest = false;
3571    // Added a timed condition wait
3572    struct timespec ts;
3573    uint8_t isValidTimeout = 1;
3574    rc = clock_gettime(CLOCK_REALTIME, &ts);
3575    if (rc < 0) {
3576      isValidTimeout = 0;
3577      ALOGE("%s: Error reading the real time clock!!", __func__);
3578    }
3579    else {
3580      // Make timeout as 5 sec for request to be honored
3581      ts.tv_sec += 5;
3582    }
3583    //Block on conditional variable
3584    if (mBatchSize) {
3585        /* For HFR, more buffers are dequeued upfront to improve the performance */
3586        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3587        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3588    }
3589    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3590        if (!isValidTimeout) {
3591            CDBG("%s: Blocking on conditional wait", __func__);
3592            pthread_cond_wait(&mRequestCond, &mMutex);
3593        }
3594        else {
3595            CDBG("%s: Blocking on timed conditional wait", __func__);
3596            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3597            if (rc == ETIMEDOUT) {
3598                rc = -ENODEV;
3599                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3600                break;
3601            }
3602        }
3603        CDBG("%s: Unblocked", __func__);
3604        if (mWokenUpByDaemon) {
3605            mWokenUpByDaemon = false;
3606            if (mPendingLiveRequest < maxInFlightRequests)
3607                break;
3608        }
3609    }
3610    pthread_mutex_unlock(&mMutex);
3611
3612    return rc;
3613}
3614
3615/*===========================================================================
3616 * FUNCTION   : dump
3617 *
 * DESCRIPTION: dump debug information of the pending requests, the pending
 *              buffer map and the pending frame drop list to the given fd
 *
 * PARAMETERS :
 *   @fd      : file descriptor to write the dump output to
 *
 * RETURN     : None
3624 *==========================================================================*/
3625void QCamera3HardwareInterface::dump(int fd)
3626{
3627    pthread_mutex_lock(&mMutex);
3628    dprintf(fd, "\n Camera HAL3 information Begin \n");
3629
3630    dprintf(fd, "\nNumber of pending requests: %zu \n",
3631        mPendingRequestsList.size());
3632    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3633    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3634    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3635    for(pendingRequestIterator i = mPendingRequestsList.begin();
3636            i != mPendingRequestsList.end(); i++) {
3637        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3638        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3639        i->input_buffer);
3640    }
3641    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3642                mPendingBuffersMap.num_buffers);
3643    dprintf(fd, "-------+------------------\n");
3644    dprintf(fd, " Frame | Stream type mask \n");
3645    dprintf(fd, "-------+------------------\n");
3646    for(List<PendingBufferInfo>::iterator i =
3647        mPendingBuffersMap.mPendingBufferList.begin();
3648        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3649        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3650        dprintf(fd, " %5d | %11d \n",
3651                i->frame_number, channel->getStreamTypeMask());
3652    }
3653    dprintf(fd, "-------+------------------\n");
3654
3655    dprintf(fd, "\nPending frame drop list: %zu\n",
3656        mPendingFrameDropList.size());
3657    dprintf(fd, "-------+-----------\n");
3658    dprintf(fd, " Frame | Stream ID \n");
3659    dprintf(fd, "-------+-----------\n");
3660    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3661        i != mPendingFrameDropList.end(); i++) {
3662        dprintf(fd, " %5d | %9d \n",
3663            i->frame_number, i->stream_ID);
3664    }
3665    dprintf(fd, "-------+-----------\n");
3666
3667    dprintf(fd, "\n Camera HAL3 information End \n");
3668
3669    /* use dumpsys media.camera as trigger to send update debug level event */
3670    mUpdateDebugLevel = true;
3671    pthread_mutex_unlock(&mMutex);
3672    return;
3673}
3674
3675/*===========================================================================
3676 * FUNCTION   : flush
3677 *
 * DESCRIPTION: stop all channels, return all pending requests with error
 *              status and restart the channels
 *
 * PARAMETERS : None
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
3684 *==========================================================================*/
3685int QCamera3HardwareInterface::flush()
3686{
3687    ATRACE_CALL();
3688    int32_t rc = NO_ERROR;
3689
3690    CDBG("%s: Unblocking Process Capture Request", __func__);
3691    pthread_mutex_lock(&mMutex);
3692
3693    if (mFirstRequest) {
3694        pthread_mutex_unlock(&mMutex);
3695        return NO_ERROR;
3696    }
3697
3698    mFlush = true;
3699    pthread_mutex_unlock(&mMutex);
3700
3701    rc = stopAllChannels();
3702    if (rc < 0) {
3703        ALOGE("%s: stopAllChannels failed", __func__);
3704        return rc;
3705    }
3706    if (mChannelHandle) {
3707        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3708                mChannelHandle);
3709    }
3710
3711    // Reset bundle info
3712    rc = setBundleInfo();
3713    if (rc < 0) {
3714        ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3715        return rc;
3716    }
3717
3718    // Mutex Lock
3719    pthread_mutex_lock(&mMutex);
3720
3721    // Unblock process_capture_request
3722    mPendingLiveRequest = 0;
3723    pthread_cond_signal(&mRequestCond);
3724
3725    rc = notifyErrorForPendingRequests();
3726    if (rc < 0) {
3727        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3728        pthread_mutex_unlock(&mMutex);
3729        return rc;
3730    }
3731
3732    mFlush = false;
3733
3734    // Start the Streams/Channels
3735    rc = startAllChannels();
3736    if (rc < 0) {
3737        ALOGE("%s: startAllChannels failed", __func__);
3738        pthread_mutex_unlock(&mMutex);
3739        return rc;
3740    }
3741
3742    if (mChannelHandle) {
3743        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3744                    mChannelHandle);
3745        if (rc < 0) {
3746            ALOGE("%s: start_channel failed", __func__);
3747            pthread_mutex_unlock(&mMutex);
3748            return rc;
3749        }
3750    }
3751
3752    pthread_mutex_unlock(&mMutex);
3753
3754    return 0;
3755}
3756
3757/*===========================================================================
3758 * FUNCTION   : captureResultCb
3759 *
3760 * DESCRIPTION: Callback handler for all capture result
3761 *              (streams, as well as metadata)
3762 *
3763 * PARAMETERS :
3764 *   @metadata : metadata information
3765 *   @buffer   : actual gralloc buffer to be returned to frameworks.
3766 *               NULL if metadata.
3767 *
3768 * RETURN     : NONE
3769 *==========================================================================*/
3770void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3771                camera3_stream_buffer_t *buffer, uint32_t frame_number)
3772{
3773    if (metadata_buf) {
3774        if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
3775            handleBatchMetadata(metadata_buf,
3776                    true /* free_and_bufdone_meta_buf */);
3777        } else { /* mBatchSize = 0 */
3778            hdrPlusPerfLock(metadata_buf);
3779            pthread_mutex_lock(&mMutex);
3780            handleMetadataWithLock(metadata_buf,
3781                    true /* free_and_bufdone_meta_buf */);
3782            pthread_mutex_unlock(&mMutex);
3783        }
3784    } else {
3785        pthread_mutex_lock(&mMutex);
3786        handleBufferWithLock(buffer, frame_number);
3787        pthread_mutex_unlock(&mMutex);
3788    }
3789    return;
3790}
3791
3792/*===========================================================================
3793 * FUNCTION   : getReprocessibleOutputStreamId
3794 *
3795 * DESCRIPTION: Get source output stream id for the input reprocess stream
3796 *              based on size and format, which would be the largest
3797 *              output stream if an input stream exists.
3798 *
3799 * PARAMETERS :
3800 *   @id      : return the stream id if found
3801 *
3802 * RETURN     : int32_t type of status
3803 *              NO_ERROR  -- success
3804 *              none-zero failure code
3805 *==========================================================================*/
3806int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
3807{
3808    stream_info_t* stream = NULL;
3809
3810    /* check if any output or bidirectional stream with the same size and format
3811       and return that stream */
3812    if ((mInputStreamInfo.dim.width > 0) &&
3813            (mInputStreamInfo.dim.height > 0)) {
3814        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3815                it != mStreamInfo.end(); it++) {
3816
3817            camera3_stream_t *stream = (*it)->stream;
3818            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
3819                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
3820                    (stream->format == mInputStreamInfo.format)) {
3821                // Usage flag for an input stream and the source output stream
3822                // may be different.
3823                CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
3824                CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
3825                        __func__, stream->usage, mInputStreamInfo.usage);
3826
3827                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
3828                if (channel != NULL && channel->mStreams[0]) {
3829                    id = channel->mStreams[0]->getMyServerID();
3830                    return NO_ERROR;
3831                }
3832            }
3833        }
3834    } else {
3835        CDBG("%s: No input stream, so no reprocessible output stream", __func__);
3836    }
3837    return NAME_NOT_FOUND;
3838}
3839
3840/*===========================================================================
3841 * FUNCTION   : lookupFwkName
3842 *
3843 * DESCRIPTION: In case the enum is not same in fwk and backend
3844 *              make sure the parameter is correctly propogated
3845 *
3846 * PARAMETERS  :
3847 *   @arr      : map between the two enums
3848 *   @len      : len of the map
3849 *   @hal_name : name of the hal_parm to map
3850 *
3851 * RETURN     : int type of status
3852 *              fwk_name  -- success
3853 *              none-zero failure code
3854 *==========================================================================*/
3855template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3856        size_t len, halType hal_name)
3857{
3858
3859    for (size_t i = 0; i < len; i++) {
3860        if (arr[i].hal_name == hal_name) {
3861            return arr[i].fwk_name;
3862        }
3863    }
3864
3865    /* Not able to find matching framework type is not necessarily
3866     * an error case. This happens when mm-camera supports more attributes
3867     * than the frameworks do */
3868    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3869    return NAME_NOT_FOUND;
3870}
3871
3872/*===========================================================================
3873 * FUNCTION   : lookupHalName
3874 *
3875 * DESCRIPTION: In case the enum is not same in fwk and backend
3876 *              make sure the parameter is correctly propogated
3877 *
3878 * PARAMETERS  :
3879 *   @arr      : map between the two enums
3880 *   @len      : len of the map
3881 *   @fwk_name : name of the hal_parm to map
3882 *
3883 * RETURN     : int32_t type of status
3884 *              hal_name  -- success
3885 *              none-zero failure code
3886 *==========================================================================*/
3887template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3888        size_t len, fwkType fwk_name)
3889{
3890    for (size_t i = 0; i < len; i++) {
3891        if (arr[i].fwk_name == fwk_name) {
3892            return arr[i].hal_name;
3893        }
3894    }
3895
3896    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3897    return NAME_NOT_FOUND;
3898}
3899
3900/*===========================================================================
3901 * FUNCTION   : lookupProp
3902 *
3903 * DESCRIPTION: lookup a value by its name
3904 *
3905 * PARAMETERS :
3906 *   @arr     : map between the two enums
3907 *   @len     : size of the map
3908 *   @name    : name to be looked up
3909 *
3910 * RETURN     : Value if found
3911 *              CAM_CDS_MODE_MAX if not found
3912 *==========================================================================*/
3913template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
3914        size_t len, const char *name)
3915{
3916    if (name) {
3917        for (size_t i = 0; i < len; i++) {
3918            if (!strcmp(arr[i].desc, name)) {
3919                return arr[i].val;
3920            }
3921        }
3922    }
3923    return CAM_CDS_MODE_MAX;
3924}
3925
3926/*===========================================================================
3927 *
3928 * DESCRIPTION:
3929 *
3930 * PARAMETERS :
3931 *   @metadata : metadata information from callback
3932 *   @timestamp: metadata buffer timestamp
3933 *   @request_id: request id
3934 *   @jpegMetadata: additional jpeg metadata
3935 *   @pprocDone: whether internal offline postprocsesing is done
3936 *
3937 * RETURN     : camera_metadata_t*
3938 *              metadata in a format specified by fwk
3939 *==========================================================================*/
3940camera_metadata_t*
3941QCamera3HardwareInterface::translateFromHalMetadata(
3942                                 metadata_buffer_t *metadata,
3943                                 nsecs_t timestamp,
3944                                 int32_t request_id,
3945                                 const CameraMetadata& jpegMetadata,
3946                                 uint8_t pipeline_depth,
3947                                 uint8_t capture_intent,
3948                                 bool pprocDone)
3949{
3950    CameraMetadata camMetadata;
3951    camera_metadata_t *resultMetadata;
3952
3953    if (jpegMetadata.entryCount())
3954        camMetadata.append(jpegMetadata);
3955
3956    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
3957    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
3958    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
3959    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
3960
3961    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
3962        int64_t fwk_frame_number = *frame_number;
3963        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
3964    }
3965
3966    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
3967        int32_t fps_range[2];
3968        fps_range[0] = (int32_t)float_range->min_fps;
3969        fps_range[1] = (int32_t)float_range->max_fps;
3970        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3971                                      fps_range, 2);
3972        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
3973            __func__, fps_range[0], fps_range[1]);
3974    }
3975
3976    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
3977        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
3978    }
3979
3980    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
3981        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
3982                METADATA_MAP_SIZE(SCENE_MODES_MAP),
3983                *sceneMode);
3984        if (NAME_NOT_FOUND != val) {
3985            uint8_t fwkSceneMode = (uint8_t)val;
3986            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
3987            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
3988                    __func__, fwkSceneMode);
3989        }
3990    }
3991
3992    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
3993        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
3994        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
3995    }
3996
3997    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
3998        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
3999        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4000    }
4001
4002    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4003        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4004        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4005    }
4006
4007    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4008            CAM_INTF_META_EDGE_MODE, metadata) {
4009        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
4010        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4011    }
4012
4013    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4014        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4015        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4016    }
4017
4018    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4019        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4020    }
4021
4022    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4023        if (0 <= *flashState) {
4024            uint8_t fwk_flashState = (uint8_t) *flashState;
4025            if (!gCamCapability[mCameraId]->flash_available) {
4026                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4027            }
4028            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4029        }
4030    }
4031
4032    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4033        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4034        if (NAME_NOT_FOUND != val) {
4035            uint8_t fwk_flashMode = (uint8_t)val;
4036            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4037        }
4038    }
4039
4040    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4041        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4042        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4043    }
4044
4045    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4046        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4047    }
4048
4049    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4050        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4051    }
4052
4053    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4054        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4055    }
4056
4057    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4058        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4059        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4060    }
4061
4062    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4063        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4064        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4065    }
4066
4067    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4068        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4069        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4070    }
4071
4072    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4073        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4074    }
4075
4076    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4077        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4078
4079        CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4080          blackLevelSourcePattern->cam_black_level[0],
4081          blackLevelSourcePattern->cam_black_level[1],
4082          blackLevelSourcePattern->cam_black_level[2],
4083          blackLevelSourcePattern->cam_black_level[3]);
4084    }
4085
4086    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4087        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4088        float fwk_blackLevelInd[4];
4089
4090        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4091        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4092        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4093        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4094
4095        CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4096          blackLevelAppliedPattern->cam_black_level[0],
4097          blackLevelAppliedPattern->cam_black_level[1],
4098          blackLevelAppliedPattern->cam_black_level[2],
4099          blackLevelAppliedPattern->cam_black_level[3]);
4100        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4101        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4102    }
4103
4104
4105    if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4106        gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4107        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4108        for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4109            opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4110        }
4111        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4112                opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4113    }
4114
4115    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4116            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4117        int32_t scalerCropRegion[4];
4118        scalerCropRegion[0] = hScalerCropRegion->left;
4119        scalerCropRegion[1] = hScalerCropRegion->top;
4120        scalerCropRegion[2] = hScalerCropRegion->width;
4121        scalerCropRegion[3] = hScalerCropRegion->height;
4122
4123        // Adjust crop region from sensor output coordinate system to active
4124        // array coordinate system.
4125        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4126                scalerCropRegion[2], scalerCropRegion[3]);
4127
4128        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4129    }
4130
4131    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4132        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4133        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4134    }
4135
4136    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4137            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4138        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4139        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4140    }
4141
4142    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4143            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4144        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4145        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4146                sensorRollingShutterSkew, 1);
4147    }
4148
4149    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4150        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4151        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4152
4153        //calculate the noise profile based on sensitivity
4154        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4155        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4156        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4157        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4158            noise_profile[i]   = noise_profile_S;
4159            noise_profile[i+1] = noise_profile_O;
4160        }
4161        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4162                noise_profile_S, noise_profile_O);
4163        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4164                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4165    }
4166
4167    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4168        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4169        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4170    }
4171
4172    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4173        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4174                *faceDetectMode);
4175        if (NAME_NOT_FOUND != val) {
4176            uint8_t fwk_faceDetectMode = (uint8_t)val;
4177            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4178
4179            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4180                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4181                        CAM_INTF_META_FACE_DETECTION, metadata) {
4182                    uint8_t numFaces = MIN(
4183                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4184                    int32_t faceIds[MAX_ROI];
4185                    uint8_t faceScores[MAX_ROI];
4186                    int32_t faceRectangles[MAX_ROI * 4];
4187                    int32_t faceLandmarks[MAX_ROI * 6];
4188                    size_t j = 0, k = 0;
4189
4190                    for (size_t i = 0; i < numFaces; i++) {
4191                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4192                        // Adjust crop region from sensor output coordinate system to active
4193                        // array coordinate system.
4194                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4195                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4196                                rect.width, rect.height);
4197
4198                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4199                                faceRectangles+j, -1);
4200
4201                        // Map the co-ordinate sensor output coordinate system to active
4202                        // array coordinate system.
4203                        cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4204                        mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4205                                face.left_eye_center.y);
4206                        mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4207                                face.right_eye_center.y);
4208                        mCropRegionMapper.toActiveArray(face.mouth_center.x,
4209                                face.mouth_center.y);
4210
4211                        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4212                        j+= 4;
4213                        k+= 6;
4214                    }
4215                    if (numFaces <= 0) {
4216                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4217                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4218                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4219                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4220                    }
4221
4222                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4223                            numFaces);
4224                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4225                            faceRectangles, numFaces * 4U);
4226                    if (fwk_faceDetectMode ==
4227                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4228                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4229                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4230                                faceLandmarks, numFaces * 6U);
4231                   }
4232                }
4233            }
4234        }
4235    }
4236
4237    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4238        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4239        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4240    }
4241
4242    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4243            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4244        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4245        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4246    }
4247
4248    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4249            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4250        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4251                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4252    }
4253
4254    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4255            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4256        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4257                CAM_MAX_SHADING_MAP_HEIGHT);
4258        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4259                CAM_MAX_SHADING_MAP_WIDTH);
4260        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4261                lensShadingMap->lens_shading, 4U * map_width * map_height);
4262    }
4263
4264    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4265        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4266        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4267    }
4268
4269    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4270        //Populate CAM_INTF_META_TONEMAP_CURVES
4271        /* ch0 = G, ch 1 = B, ch 2 = R*/
4272        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4273            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4274                    __func__, tonemap->tonemap_points_cnt,
4275                    CAM_MAX_TONEMAP_CURVE_SIZE);
4276            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4277        }
4278
4279        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4280                        &tonemap->curves[0].tonemap_points[0][0],
4281                        tonemap->tonemap_points_cnt * 2);
4282
4283        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4284                        &tonemap->curves[1].tonemap_points[0][0],
4285                        tonemap->tonemap_points_cnt * 2);
4286
4287        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4288                        &tonemap->curves[2].tonemap_points[0][0],
4289                        tonemap->tonemap_points_cnt * 2);
4290    }
4291
4292    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4293            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4294        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4295                CC_GAINS_COUNT);
4296    }
4297
4298    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4299            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4300        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4301                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4302                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4303    }
4304
4305    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4306            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4307        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4308            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4309                    __func__, toneCurve->tonemap_points_cnt,
4310                    CAM_MAX_TONEMAP_CURVE_SIZE);
4311            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4312        }
4313        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4314                (float*)toneCurve->curve.tonemap_points,
4315                toneCurve->tonemap_points_cnt * 2);
4316    }
4317
4318    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4319            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4320        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4321                predColorCorrectionGains->gains, 4);
4322    }
4323
4324    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4325            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4326        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4327                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4328                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4329    }
4330
4331    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4332        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4333    }
4334
4335    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4336        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4337        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4338    }
4339
4340    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4341        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4342        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4343    }
4344
4345    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4346        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4347                *effectMode);
4348        if (NAME_NOT_FOUND != val) {
4349            uint8_t fwk_effectMode = (uint8_t)val;
4350            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4351        }
4352    }
4353
4354    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4355            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4356        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4357                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4358        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4359            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4360        }
4361        int32_t fwk_testPatternData[4];
4362        fwk_testPatternData[0] = testPatternData->r;
4363        fwk_testPatternData[3] = testPatternData->b;
4364        switch (gCamCapability[mCameraId]->color_arrangement) {
4365        case CAM_FILTER_ARRANGEMENT_RGGB:
4366        case CAM_FILTER_ARRANGEMENT_GRBG:
4367            fwk_testPatternData[1] = testPatternData->gr;
4368            fwk_testPatternData[2] = testPatternData->gb;
4369            break;
4370        case CAM_FILTER_ARRANGEMENT_GBRG:
4371        case CAM_FILTER_ARRANGEMENT_BGGR:
4372            fwk_testPatternData[2] = testPatternData->gr;
4373            fwk_testPatternData[1] = testPatternData->gb;
4374            break;
4375        default:
4376            ALOGE("%s: color arrangement %d is not supported", __func__,
4377                gCamCapability[mCameraId]->color_arrangement);
4378            break;
4379        }
4380        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4381    }
4382
4383    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4384        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4385    }
4386
4387    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4388        String8 str((const char *)gps_methods);
4389        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4390    }
4391
4392    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4393        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4394    }
4395
4396    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4397        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4398    }
4399
4400    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4401        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4402        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4403    }
4404
4405    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4406        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4407        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4408    }
4409
4410    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4411        int32_t fwk_thumb_size[2];
4412        fwk_thumb_size[0] = thumb_size->width;
4413        fwk_thumb_size[1] = thumb_size->height;
4414        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4415    }
4416
4417    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4418        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4419                privateData,
4420                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4421    }
4422
4423    if (metadata->is_tuning_params_valid) {
4424        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4425        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4426        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4427
4428
4429        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4430                sizeof(uint32_t));
4431        data += sizeof(uint32_t);
4432
4433        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4434                sizeof(uint32_t));
4435        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4436        data += sizeof(uint32_t);
4437
4438        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4439                sizeof(uint32_t));
4440        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4441        data += sizeof(uint32_t);
4442
4443        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4444                sizeof(uint32_t));
4445        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4446        data += sizeof(uint32_t);
4447
4448        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4449                sizeof(uint32_t));
4450        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4451        data += sizeof(uint32_t);
4452
4453        metadata->tuning_params.tuning_mod3_data_size = 0;
4454        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4455                sizeof(uint32_t));
4456        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4457        data += sizeof(uint32_t);
4458
4459        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4460                TUNING_SENSOR_DATA_MAX);
4461        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4462                count);
4463        data += count;
4464
4465        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4466                TUNING_VFE_DATA_MAX);
4467        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4468                count);
4469        data += count;
4470
4471        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4472                TUNING_CPP_DATA_MAX);
4473        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4474                count);
4475        data += count;
4476
4477        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4478                TUNING_CAC_DATA_MAX);
4479        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4480                count);
4481        data += count;
4482
4483        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4484                (int32_t *)(void *)tuning_meta_data_blob,
4485                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4486    }
4487
4488    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4489            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4490        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4491                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4492                NEUTRAL_COL_POINTS);
4493    }
4494
4495    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4496        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4497        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4498    }
4499
4500    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4501        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4502        // Adjust crop region from sensor output coordinate system to active
4503        // array coordinate system.
4504        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4505                hAeRegions->rect.width, hAeRegions->rect.height);
4506
4507        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4508        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4509                REGIONS_TUPLE_COUNT);
4510        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4511                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4512                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4513                hAeRegions->rect.height);
4514    }
4515
4516    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4517        uint8_t fwk_afState = (uint8_t) *afState;
4518        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4519        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4520    }
4521
4522    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4523        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4524    }
4525
4526    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4527        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4528    }
4529
4530    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4531        uint8_t fwk_lensState = *lensState;
4532        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4533    }
4534
4535    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4536        /*af regions*/
4537        int32_t afRegions[REGIONS_TUPLE_COUNT];
4538        // Adjust crop region from sensor output coordinate system to active
4539        // array coordinate system.
4540        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4541                hAfRegions->rect.width, hAfRegions->rect.height);
4542
4543        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4544        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4545                REGIONS_TUPLE_COUNT);
4546        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4547                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4548                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4549                hAfRegions->rect.height);
4550    }
4551
4552    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4553        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4554                *hal_ab_mode);
4555        if (NAME_NOT_FOUND != val) {
4556            uint8_t fwk_ab_mode = (uint8_t)val;
4557            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4558        }
4559    }
4560
4561    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4562        int val = lookupFwkName(SCENE_MODES_MAP,
4563                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4564        if (NAME_NOT_FOUND != val) {
4565            uint8_t fwkBestshotMode = (uint8_t)val;
4566            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4567            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4568        } else {
4569            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4570        }
4571    }
4572
4573    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4574         uint8_t fwk_mode = (uint8_t) *mode;
4575         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4576    }
4577
4578    /* Constant metadata values to be update*/
4579    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4580    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4581
4582    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4583    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4584
4585    int32_t hotPixelMap[2];
4586    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4587
4588    // CDS
4589    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4590        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4591    }
4592
4593    // TNR
4594    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4595        uint8_t tnr_enable       = tnr->denoise_enable;
4596        int32_t tnr_process_type = (int32_t)tnr->process_plates;
4597
4598        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4599        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4600    }
4601
4602    // Reprocess crop data
4603    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4604        uint8_t cnt = crop_data->num_of_streams;
4605        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4606            // mm-qcamera-daemon only posts crop_data for streams
4607            // not linked to pproc. So no valid crop metadata is not
4608            // necessarily an error case.
4609            CDBG("%s: No valid crop metadata entries", __func__);
4610        } else {
4611            uint32_t reproc_stream_id;
4612            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4613                CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4614            } else {
4615                int rc = NO_ERROR;
4616                Vector<int32_t> roi_map;
4617                int32_t *crop = new int32_t[cnt*4];
4618                if (NULL == crop) {
4619                   rc = NO_MEMORY;
4620                }
4621                if (NO_ERROR == rc) {
4622                    int32_t streams_found = 0;
4623                    for (size_t i = 0; i < cnt; i++) {
4624                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4625                            if (pprocDone) {
4626                                // HAL already does internal reprocessing,
4627                                // either via reprocessing before JPEG encoding,
4628                                // or offline postprocessing for pproc bypass case.
4629                                crop[0] = 0;
4630                                crop[1] = 0;
4631                                crop[2] = mInputStreamInfo.dim.width;
4632                                crop[3] = mInputStreamInfo.dim.height;
4633                            } else {
4634                                crop[0] = crop_data->crop_info[i].crop.left;
4635                                crop[1] = crop_data->crop_info[i].crop.top;
4636                                crop[2] = crop_data->crop_info[i].crop.width;
4637                                crop[3] = crop_data->crop_info[i].crop.height;
4638                            }
4639                            roi_map.add(crop_data->crop_info[i].roi_map.left);
4640                            roi_map.add(crop_data->crop_info[i].roi_map.top);
4641                            roi_map.add(crop_data->crop_info[i].roi_map.width);
4642                            roi_map.add(crop_data->crop_info[i].roi_map.height);
4643                            streams_found++;
4644                            CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4645                                    __func__,
4646                                    crop[0], crop[1], crop[2], crop[3]);
4647                            CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4648                                    __func__,
4649                                    crop_data->crop_info[i].roi_map.left,
4650                                    crop_data->crop_info[i].roi_map.top,
4651                                    crop_data->crop_info[i].roi_map.width,
4652                                    crop_data->crop_info[i].roi_map.height);
4653                            break;
4654
4655                       }
4656                    }
4657                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4658                            &streams_found, 1);
4659                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
4660                            crop, (size_t)(streams_found * 4));
4661                    if (roi_map.array()) {
4662                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4663                                roi_map.array(), roi_map.size());
4664                    }
4665               }
4666               if (crop) {
4667                   delete [] crop;
4668               }
4669            }
4670        }
4671    }
4672
4673    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4674        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4675                *cacMode);
4676        if (NAME_NOT_FOUND != val) {
4677            uint8_t fwkCacMode = (uint8_t)val;
4678            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4679        } else {
4680            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4681        }
4682    }
4683
4684    // Post blob of cam_cds_data through vendor tag.
4685    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4686        uint8_t cnt = cdsInfo->num_of_streams;
4687        cam_cds_data_t cdsDataOverride;
4688        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4689        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4690        cdsDataOverride.num_of_streams = 1;
4691        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4692            uint32_t reproc_stream_id;
4693            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4694                CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4695            } else {
4696                for (size_t i = 0; i < cnt; i++) {
4697                    if (cdsInfo->cds_info[i].stream_id ==
4698                            reproc_stream_id) {
4699                        cdsDataOverride.cds_info[0].cds_enable =
4700                                cdsInfo->cds_info[i].cds_enable;
4701                        break;
4702                    }
4703                }
4704            }
4705        } else {
4706            CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
4707        }
4708        camMetadata.update(QCAMERA3_CDS_INFO,
4709                (uint8_t *)&cdsDataOverride,
4710                sizeof(cam_cds_data_t));
4711    }
4712
4713    // Ldaf calibration data
4714    if (!mLdafCalibExist) {
4715        IF_META_AVAILABLE(uint32_t, ldafCalib,
4716                CAM_INTF_META_LDAF_EXIF, metadata) {
4717            mLdafCalibExist = true;
4718            mLdafCalib[0] = ldafCalib[0];
4719            mLdafCalib[1] = ldafCalib[1];
4720            CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
4721                    ldafCalib[0], ldafCalib[1]);
4722        }
4723    }
4724
4725    resultMetadata = camMetadata.release();
4726    return resultMetadata;
4727}
4728
4729/*===========================================================================
4730 * FUNCTION   : saveExifParams
4731 *
 * DESCRIPTION: Cache 3A/statistics EXIF debug parameters from HAL metadata
4733 *
4734 * PARAMETERS :
4735 *   @metadata : metadata information from callback
4736 *
4737 * RETURN     : none
4738 *
4739 *==========================================================================*/
4740void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
4741{
4742    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
4743            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
4744        mExifParams.ae_debug_params = *ae_exif_debug_params;
4745        mExifParams.ae_debug_params_valid = TRUE;
4746    }
4747    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
4748            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
4749        mExifParams.awb_debug_params = *awb_exif_debug_params;
4750        mExifParams.awb_debug_params_valid = TRUE;
4751    }
4752    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
4753            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
4754        mExifParams.af_debug_params = *af_exif_debug_params;
4755        mExifParams.af_debug_params_valid = TRUE;
4756    }
4757    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
4758            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
4759        mExifParams.asd_debug_params = *asd_exif_debug_params;
4760        mExifParams.asd_debug_params_valid = TRUE;
4761    }
4762    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
4763            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
4764        mExifParams.stats_debug_params = *stats_exif_debug_params;
4765        mExifParams.stats_debug_params_valid = TRUE;
4766    }
4767}
4768
4769/*===========================================================================
4770 * FUNCTION   : get3AExifParams
4771 *
 * DESCRIPTION: Accessor for the cached 3A EXIF debug parameters
4773 *
4774 * PARAMETERS : none
4775 *
4776 *
4777 * RETURN     : mm_jpeg_exif_params_t
4778 *
4779 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns by value the 3A/stats EXIF debug parameters most recently
    // cached by saveExifParams().
    return mExifParams;
}
4784
4785/*===========================================================================
4786 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4787 *
 * DESCRIPTION: Translate urgent (partial-result) 3A metadata from HAL format
 *              to framework metadata tags
4789 *
4790 * PARAMETERS :
4791 *   @metadata : metadata information from callback
4792 *
4793 * RETURN     : camera_metadata_t*
4794 *              metadata in a format specified by fwk
4795 *==========================================================================*/
4796camera_metadata_t*
4797QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
4798                                (metadata_buffer_t *metadata)
4799{
4800    CameraMetadata camMetadata;
4801    camera_metadata_t *resultMetadata;
4802
4803
4804    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
4805        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
4806        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
4807        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
4808    }
4809
4810    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
4811        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
4812                &aecTrigger->trigger, 1);
4813        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
4814                &aecTrigger->trigger_id, 1);
4815        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
4816                __func__, aecTrigger->trigger);
4817        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
4818                aecTrigger->trigger_id);
4819    }
4820
4821    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
4822        uint8_t fwk_ae_state = (uint8_t) *ae_state;
4823        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
4824        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
4825    }
4826
4827    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4828        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4829        if (NAME_NOT_FOUND != val) {
4830            uint8_t fwkAfMode = (uint8_t)val;
4831            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4832            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
4833        } else {
4834            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
4835                    val);
4836        }
4837    }
4838
4839    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
4840        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
4841                &af_trigger->trigger, 1);
4842        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
4843                __func__, af_trigger->trigger);
4844        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
4845        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
4846                af_trigger->trigger_id);
4847    }
4848
4849    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
4850        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4851                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
4852        if (NAME_NOT_FOUND != val) {
4853            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
4854            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
4855            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
4856        } else {
4857            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
4858        }
4859    }
4860
4861    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4862    uint32_t aeMode = CAM_AE_MODE_MAX;
4863    int32_t flashMode = CAM_FLASH_MODE_MAX;
4864    int32_t redeye = -1;
4865    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
4866        aeMode = *pAeMode;
4867    }
4868    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
4869        flashMode = *pFlashMode;
4870    }
4871    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
4872        redeye = *pRedeye;
4873    }
4874
4875    if (1 == redeye) {
4876        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
4877        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4878    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
4879        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
4880                flashMode);
4881        if (NAME_NOT_FOUND != val) {
4882            fwk_aeMode = (uint8_t)val;
4883            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4884        } else {
4885            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
4886        }
4887    } else if (aeMode == CAM_AE_MODE_ON) {
4888        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
4889        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4890    } else if (aeMode == CAM_AE_MODE_OFF) {
4891        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4892        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4893    } else {
4894        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
4895              "flashMode:%d, aeMode:%u!!!",
4896                __func__, redeye, flashMode, aeMode);
4897    }
4898
4899    resultMetadata = camMetadata.release();
4900    return resultMetadata;
4901}
4902
4903/*===========================================================================
4904 * FUNCTION   : dumpMetadataToFile
4905 *
4906 * DESCRIPTION: Dumps tuning metadata to file system
4907 *
4908 * PARAMETERS :
4909 *   @meta           : tuning metadata
4910 *   @dumpFrameCount : current dump frame count
4911 *   @enabled        : Enable mask
4912 *
4913 *==========================================================================*/
4914void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
4915                                                   uint32_t &dumpFrameCount,
4916                                                   bool enabled,
4917                                                   const char *type,
4918                                                   uint32_t frameNumber)
4919{
4920    uint32_t frm_num = 0;
4921
4922    //Some sanity checks
4923    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
4924        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
4925              __func__,
4926              meta.tuning_sensor_data_size,
4927              TUNING_SENSOR_DATA_MAX);
4928        return;
4929    }
4930
4931    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
4932        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
4933              __func__,
4934              meta.tuning_vfe_data_size,
4935              TUNING_VFE_DATA_MAX);
4936        return;
4937    }
4938
4939    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
4940        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
4941              __func__,
4942              meta.tuning_cpp_data_size,
4943              TUNING_CPP_DATA_MAX);
4944        return;
4945    }
4946
4947    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
4948        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
4949              __func__,
4950              meta.tuning_cac_data_size,
4951              TUNING_CAC_DATA_MAX);
4952        return;
4953    }
4954    //
4955
4956    if(enabled){
4957        char timeBuf[FILENAME_MAX];
4958        char buf[FILENAME_MAX];
4959        memset(buf, 0, sizeof(buf));
4960        memset(timeBuf, 0, sizeof(timeBuf));
4961        time_t current_time;
4962        struct tm * timeinfo;
4963        time (&current_time);
4964        timeinfo = localtime (&current_time);
4965        if (timeinfo != NULL) {
4966            strftime (timeBuf, sizeof(timeBuf),
4967                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
4968        }
4969        String8 filePath(timeBuf);
4970        snprintf(buf,
4971                sizeof(buf),
4972                "%dm_%s_%d.bin",
4973                dumpFrameCount,
4974                type,
4975                frameNumber);
4976        filePath.append(buf);
4977        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
4978        if (file_fd >= 0) {
4979            ssize_t written_len = 0;
4980            meta.tuning_data_version = TUNING_DATA_VERSION;
4981            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
4982            written_len += write(file_fd, data, sizeof(uint32_t));
4983            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
4984            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4985            written_len += write(file_fd, data, sizeof(uint32_t));
4986            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
4987            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4988            written_len += write(file_fd, data, sizeof(uint32_t));
4989            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
4990            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4991            written_len += write(file_fd, data, sizeof(uint32_t));
4992            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
4993            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4994            written_len += write(file_fd, data, sizeof(uint32_t));
4995            meta.tuning_mod3_data_size = 0;
4996            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
4997            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4998            written_len += write(file_fd, data, sizeof(uint32_t));
4999            size_t total_size = meta.tuning_sensor_data_size;
5000            data = (void *)((uint8_t *)&meta.data);
5001            written_len += write(file_fd, data, total_size);
5002            total_size = meta.tuning_vfe_data_size;
5003            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5004            written_len += write(file_fd, data, total_size);
5005            total_size = meta.tuning_cpp_data_size;
5006            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5007            written_len += write(file_fd, data, total_size);
5008            total_size = meta.tuning_cac_data_size;
5009            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5010            written_len += write(file_fd, data, total_size);
5011            close(file_fd);
5012        }else {
5013            ALOGE("%s: fail to open file for metadata dumping", __func__);
5014        }
5015    }
5016}
5017
5018/*===========================================================================
5019 * FUNCTION   : cleanAndSortStreamInfo
5020 *
5021 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5022 *              and sort them such that raw stream is at the end of the list
5023 *              This is a workaround for camera daemon constraint.
5024 *
5025 * PARAMETERS : None
5026 *
5027 *==========================================================================*/
5028void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5029{
5030    List<stream_info_t *> newStreamInfo;
5031
5032    /*clean up invalid streams*/
5033    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5034            it != mStreamInfo.end();) {
5035        if(((*it)->status) == INVALID){
5036            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5037            delete channel;
5038            free(*it);
5039            it = mStreamInfo.erase(it);
5040        } else {
5041            it++;
5042        }
5043    }
5044
5045    // Move preview/video/callback/snapshot streams into newList
5046    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5047            it != mStreamInfo.end();) {
5048        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5049                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5050                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5051            newStreamInfo.push_back(*it);
5052            it = mStreamInfo.erase(it);
5053        } else
5054            it++;
5055    }
5056    // Move raw streams into newList
5057    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5058            it != mStreamInfo.end();) {
5059        newStreamInfo.push_back(*it);
5060        it = mStreamInfo.erase(it);
5061    }
5062
5063    mStreamInfo = newStreamInfo;
5064}
5065
5066/*===========================================================================
5067 * FUNCTION   : extractJpegMetadata
5068 *
5069 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5070 *              JPEG metadata is cached in HAL, and return as part of capture
5071 *              result when metadata is returned from camera daemon.
5072 *
5073 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5074 *              @request:      capture request
5075 *
5076 *==========================================================================*/
5077void QCamera3HardwareInterface::extractJpegMetadata(
5078        CameraMetadata& jpegMetadata,
5079        const camera3_capture_request_t *request)
5080{
5081    CameraMetadata frame_settings;
5082    frame_settings = request->settings;
5083
5084    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5085        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5086                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5087                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5088
5089    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5090        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5091                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5092                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5093
5094    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5095        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5096                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5097                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5098
5099    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5100        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5101                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5102                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5103
5104    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5105        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5106                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5107                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5108
5109    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5110        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5111                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5112                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5113
5114    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5115        int32_t thumbnail_size[2];
5116        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5117        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5118        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5119            int32_t orientation =
5120                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5121            if ((orientation == 90) || (orientation == 270)) {
5122               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5123               int32_t temp;
5124               temp = thumbnail_size[0];
5125               thumbnail_size[0] = thumbnail_size[1];
5126               thumbnail_size[1] = temp;
5127            }
5128         }
5129         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5130                thumbnail_size,
5131                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5132    }
5133
5134}
5135
5136/*===========================================================================
5137 * FUNCTION   : convertToRegions
5138 *
5139 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5140 *
5141 * PARAMETERS :
5142 *   @rect   : cam_rect_t struct to convert
5143 *   @region : int32_t destination array
5144 *   @weight : if we are converting from cam_area_t, weight is valid
5145 *             else weight = -1
5146 *
5147 *==========================================================================*/
5148void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5149        int32_t *region, int weight)
5150{
5151    region[0] = rect.left;
5152    region[1] = rect.top;
5153    region[2] = rect.left + rect.width;
5154    region[3] = rect.top + rect.height;
5155    if (weight > -1) {
5156        region[4] = weight;
5157    }
5158}
5159
5160/*===========================================================================
5161 * FUNCTION   : convertFromRegions
5162 *
5163 * DESCRIPTION: helper method to convert from array to cam_rect_t
5164 *
 * PARAMETERS :
 *   @roi      : cam_area_t struct to populate
 *   @settings : capture request settings containing the region data
 *   @tag      : metadata tag holding the region as
 *               [x_min, y_min, x_max, y_max, weight]
5170 *
5171 *==========================================================================*/
5172void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5173        const camera_metadata_t *settings, uint32_t tag)
5174{
5175    CameraMetadata frame_settings;
5176    frame_settings = settings;
5177    int32_t x_min = frame_settings.find(tag).data.i32[0];
5178    int32_t y_min = frame_settings.find(tag).data.i32[1];
5179    int32_t x_max = frame_settings.find(tag).data.i32[2];
5180    int32_t y_max = frame_settings.find(tag).data.i32[3];
5181    roi.weight = frame_settings.find(tag).data.i32[4];
5182    roi.rect.left = x_min;
5183    roi.rect.top = y_min;
5184    roi.rect.width = x_max - x_min;
5185    roi.rect.height = y_max - y_min;
5186}
5187
5188/*===========================================================================
5189 * FUNCTION   : resetIfNeededROI
5190 *
5191 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5192 *              crop region
5193 *
5194 * PARAMETERS :
5195 *   @roi       : cam_area_t struct to resize
5196 *   @scalerCropRegion : cam_crop_region_t region to compare against
5197 *
5198 *
5199 *==========================================================================*/
5200bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5201                                                 const cam_crop_region_t* scalerCropRegion)
5202{
5203    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5204    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5205    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5206    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5207
5208    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5209     * without having this check the calculations below to validate if the roi
5210     * is inside scalar crop region will fail resulting in the roi not being
5211     * reset causing algorithm to continue to use stale roi window
5212     */
5213    if (roi->weight == 0) {
5214        return true;
5215    }
5216
5217    if ((roi_x_max < scalerCropRegion->left) ||
5218        // right edge of roi window is left of scalar crop's left edge
5219        (roi_y_max < scalerCropRegion->top)  ||
5220        // bottom edge of roi window is above scalar crop's top edge
5221        (roi->rect.left > crop_x_max) ||
5222        // left edge of roi window is beyond(right) of scalar crop's right edge
5223        (roi->rect.top > crop_y_max)){
5224        // top edge of roi windo is above scalar crop's top edge
5225        return false;
5226    }
5227    if (roi->rect.left < scalerCropRegion->left) {
5228        roi->rect.left = scalerCropRegion->left;
5229    }
5230    if (roi->rect.top < scalerCropRegion->top) {
5231        roi->rect.top = scalerCropRegion->top;
5232    }
5233    if (roi_x_max > crop_x_max) {
5234        roi_x_max = crop_x_max;
5235    }
5236    if (roi_y_max > crop_y_max) {
5237        roi_y_max = crop_y_max;
5238    }
5239    roi->rect.width = roi_x_max - roi->rect.left;
5240    roi->rect.height = roi_y_max - roi->rect.top;
5241    return true;
5242}
5243
5244/*===========================================================================
5245 * FUNCTION   : convertLandmarks
5246 *
5247 * DESCRIPTION: helper method to extract the landmarks from face detection info
5248 *
5249 * PARAMETERS :
 *   @face      : cam_face_detection_info_t to read landmark points from
 *   @landmarks : int32_t destination array (6 entries: left eye x/y,
 *                right eye x/y, mouth x/y)
5252 *
5253 *
5254 *==========================================================================*/
5255void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5256{
5257    landmarks[0] = (int32_t)face.left_eye_center.x;
5258    landmarks[1] = (int32_t)face.left_eye_center.y;
5259    landmarks[2] = (int32_t)face.right_eye_center.x;
5260    landmarks[3] = (int32_t)face.right_eye_center.y;
5261    landmarks[4] = (int32_t)face.mouth_center.x;
5262    landmarks[5] = (int32_t)face.mouth_center.y;
5263}
5264
5265#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5266/*===========================================================================
5267 * FUNCTION   : initCapabilities
5268 *
5269 * DESCRIPTION: initialize camera capabilities in static data struct
5270 *
5271 * PARAMETERS :
5272 *   @cameraId  : camera Id
5273 *
5274 * RETURN     : int32_t type of status
5275 *              NO_ERROR  -- success
5276 *              none-zero failure code
5277 *==========================================================================*/
5278int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5279{
5280    int rc = 0;
5281    mm_camera_vtbl_t *cameraHandle = NULL;
5282    QCamera3HeapMemory *capabilityHeap = NULL;
5283
5284    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5285    if (rc || !cameraHandle) {
5286        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5287        goto open_failed;
5288    }
5289
5290    capabilityHeap = new QCamera3HeapMemory(1);
5291    if (capabilityHeap == NULL) {
5292        ALOGE("%s: creation of capabilityHeap failed", __func__);
5293        goto heap_creation_failed;
5294    }
5295    /* Allocate memory for capability buffer */
5296    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5297    if(rc != OK) {
5298        ALOGE("%s: No memory for cappability", __func__);
5299        goto allocate_failed;
5300    }
5301
5302    /* Map memory for capability buffer */
5303    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5304    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5305                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5306                                capabilityHeap->getFd(0),
5307                                sizeof(cam_capability_t));
5308    if(rc < 0) {
5309        ALOGE("%s: failed to map capability buffer", __func__);
5310        goto map_failed;
5311    }
5312
5313    /* Query Capability */
5314    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5315    if(rc < 0) {
5316        ALOGE("%s: failed to query capability",__func__);
5317        goto query_failed;
5318    }
5319    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5320    if (!gCamCapability[cameraId]) {
5321        ALOGE("%s: out of memory", __func__);
5322        goto query_failed;
5323    }
5324    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5325                                        sizeof(cam_capability_t));
5326    rc = 0;
5327
5328query_failed:
5329    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5330                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5331map_failed:
5332    capabilityHeap->deallocate();
5333allocate_failed:
5334    delete capabilityHeap;
5335heap_creation_failed:
5336    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5337    cameraHandle = NULL;
5338open_failed:
5339    return rc;
5340}
5341
5342/*==========================================================================
5343 * FUNCTION   : get3Aversion
5344 *
5345 * DESCRIPTION: get the Q3A S/W version
5346 *
5347 * PARAMETERS :
5348 *  @sw_version: Reference of Q3A structure which will hold version info upon
5349 *               return
5350 *
5351 * RETURN     : None
5352 *
5353 *==========================================================================*/
5354void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5355{
5356    if(gCamCapability[mCameraId])
5357        sw_version = gCamCapability[mCameraId]->q3a_version;
5358    else
5359        ALOGE("%s:Capability structure NULL!", __func__);
5360}
5361
5362
5363/*===========================================================================
5364 * FUNCTION   : initParameters
5365 *
5366 * DESCRIPTION: initialize camera parameters
5367 *
5368 * PARAMETERS :
5369 *
5370 * RETURN     : int32_t type of status
5371 *              NO_ERROR  -- success
5372 *              none-zero failure code
5373 *==========================================================================*/
5374int QCamera3HardwareInterface::initParameters()
5375{
5376    int rc = 0;
5377
5378    //Allocate Set Param Buffer
5379    mParamHeap = new QCamera3HeapMemory(1);
5380    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5381    if(rc != OK) {
5382        rc = NO_MEMORY;
5383        ALOGE("Failed to allocate SETPARM Heap memory");
5384        delete mParamHeap;
5385        mParamHeap = NULL;
5386        return rc;
5387    }
5388
5389    //Map memory for parameters buffer
5390    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5391            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5392            mParamHeap->getFd(0),
5393            sizeof(metadata_buffer_t));
5394    if(rc < 0) {
5395        ALOGE("%s:failed to map SETPARM buffer",__func__);
5396        rc = FAILED_TRANSACTION;
5397        mParamHeap->deallocate();
5398        delete mParamHeap;
5399        mParamHeap = NULL;
5400        return rc;
5401    }
5402
5403    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5404
5405    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5406    return rc;
5407}
5408
5409/*===========================================================================
5410 * FUNCTION   : deinitParameters
5411 *
5412 * DESCRIPTION: de-initialize camera parameters
5413 *
5414 * PARAMETERS :
5415 *
5416 * RETURN     : NONE
5417 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the camera backend first, while the
    // heap memory backing the mapping is still alive.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Release the heap only after the backend mapping is gone.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; it is dangling now.
    mParameters = NULL;

    // mPrevParameters is a separate malloc'd buffer (see initParameters).
    free(mPrevParameters);
    mPrevParameters = NULL;
}
5432
5433/*===========================================================================
5434 * FUNCTION   : calcMaxJpegSize
5435 *
5436 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5437 *
5438 * PARAMETERS :
5439 *
5440 * RETURN     : max_jpeg_size
5441 *==========================================================================*/
5442size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5443{
5444    size_t max_jpeg_size = 0;
5445    size_t temp_width, temp_height;
5446    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5447            MAX_SIZES_CNT);
5448    for (size_t i = 0; i < count; i++) {
5449        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5450        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5451        if (temp_width * temp_height > max_jpeg_size ) {
5452            max_jpeg_size = temp_width * temp_height;
5453        }
5454    }
5455    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5456    return max_jpeg_size;
5457}
5458
5459/*===========================================================================
5460 * FUNCTION   : getMaxRawSize
5461 *
5462 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5463 *
5464 * PARAMETERS :
5465 *
5466 * RETURN     : Largest supported Raw Dimension
5467 *==========================================================================*/
5468cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5469{
5470    int max_width = 0;
5471    cam_dimension_t maxRawSize;
5472
5473    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5474    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5475        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5476            max_width = gCamCapability[camera_id]->raw_dim[i].width;
5477            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5478        }
5479    }
5480    return maxRawSize;
5481}
5482
5483
5484/*===========================================================================
5485 * FUNCTION   : calcMaxJpegDim
5486 *
5487 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5488 *
5489 * PARAMETERS :
5490 *
5491 * RETURN     : max_jpeg_dim
5492 *==========================================================================*/
5493cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5494{
5495    cam_dimension_t max_jpeg_dim;
5496    cam_dimension_t curr_jpeg_dim;
5497    max_jpeg_dim.width = 0;
5498    max_jpeg_dim.height = 0;
5499    curr_jpeg_dim.width = 0;
5500    curr_jpeg_dim.height = 0;
5501    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5502        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5503        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5504        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5505            max_jpeg_dim.width * max_jpeg_dim.height ) {
5506            max_jpeg_dim.width = curr_jpeg_dim.width;
5507            max_jpeg_dim.height = curr_jpeg_dim.height;
5508        }
5509    }
5510    return max_jpeg_dim;
5511}
5512
5513/*===========================================================================
5514 * FUNCTION   : addStreamConfig
5515 *
5516 * DESCRIPTION: adds the stream configuration to the array
5517 *
5518 * PARAMETERS :
5519 * @available_stream_configs : pointer to stream configuration array
5520 * @scalar_format            : scalar format
5521 * @dim                      : configuration dimension
5522 * @config_type              : input or output configuration type
5523 *
5524 * RETURN     : NONE
5525 *==========================================================================*/
5526void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5527        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5528{
5529    available_stream_configs.add(scalar_format);
5530    available_stream_configs.add(dim.width);
5531    available_stream_configs.add(dim.height);
5532    available_stream_configs.add(config_type);
5533}
5534
5535
5536/*===========================================================================
5537 * FUNCTION   : initStaticMetadata
5538 *
5539 * DESCRIPTION: initialize the static metadata
5540 *
5541 * PARAMETERS :
5542 *   @cameraId  : camera Id
5543 *
5544 * RETURN     : int32_t type of status
5545 *              0  -- success
5546 *              non-zero failure code
5547 *==========================================================================*/
5548int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5549{
5550    int rc = 0;
5551    CameraMetadata staticInfo;
5552    size_t count = 0;
5553    bool limitedDevice = false;
5554    char prop[PROPERTY_VALUE_MAX];
5555
5556    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5557     * guaranteed, its advertised as limited device */
5558    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5559            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5560
5561    uint8_t supportedHwLvl = limitedDevice ?
5562            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5563            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5564
5565    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5566            &supportedHwLvl, 1);
5567
5568    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5569    /*HAL 3 only*/
5570    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5571                    &gCamCapability[cameraId]->min_focus_distance, 1);
5572
5573    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5574                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5575
5576    /*should be using focal lengths but sensor doesn't provide that info now*/
5577    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5578                      &gCamCapability[cameraId]->focal_length,
5579                      1);
5580
5581    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5582                      gCamCapability[cameraId]->apertures,
5583                      gCamCapability[cameraId]->apertures_count);
5584
5585    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5586                gCamCapability[cameraId]->filter_densities,
5587                gCamCapability[cameraId]->filter_densities_count);
5588
5589
5590    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5591                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5592                      gCamCapability[cameraId]->optical_stab_modes_count);
5593
5594    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5595            gCamCapability[cameraId]->lens_shading_map_size.height};
5596    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5597                      lens_shading_map_size,
5598                      sizeof(lens_shading_map_size)/sizeof(int32_t));
5599
5600    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5601            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5602
5603    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5604            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5605
5606    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5607            &gCamCapability[cameraId]->max_frame_duration, 1);
5608
5609    camera_metadata_rational baseGainFactor = {
5610            gCamCapability[cameraId]->base_gain_factor.numerator,
5611            gCamCapability[cameraId]->base_gain_factor.denominator};
5612    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5613                      &baseGainFactor, 1);
5614
5615    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5616                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5617
5618    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5619            gCamCapability[cameraId]->pixel_array_size.height};
5620    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5621                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5622
    /* Active pixel array region as (left, top, width, height), taken
     * directly from the per-camera capability table. */
    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
                                                gCamCapability[cameraId]->active_array_size.top,
                                                gCamCapability[cameraId]->active_array_size.width,
                                                gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));

    /* Sensor white level and black level pattern, straight pass-through
     * from the capability table. */
    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                      &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    /* Timestamp source is advertised as UNKNOWN, i.e. sensor timestamps are
     * not guaranteed to share a timebase with the system realtime clock. */
    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
            &timestampSource, 1);

    /* Histogram and sharpness-map statistics capabilities. */
    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                      &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
            gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5660
    /* Pixel formats advertised to the framework. scalar_formats and
     * scalar_formats_count are reused below when building the stream
     * configurations and min-frame-duration tables. */
    int32_t scalar_formats[] = {
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
            HAL_PIXEL_FORMAT_RAW10,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                      scalar_formats,
                      scalar_formats_count);

    /* Flatten the HAL (width, height) tables into int32 pairs, clamped to
     * MAX_SIZES_CNT entries. available_processed_sizes is reused below by
     * filterJpegSizes(). */
    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
            count, MAX_SIZES_CNT, available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
            available_processed_sizes, count * 2);

    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    makeTable(gCamCapability[cameraId]->raw_dim,
            count, MAX_SIZES_CNT, available_raw_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
            available_raw_sizes, count * 2);

    /* AE target FPS ranges, flattened as (min, max) pairs. */
    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
            count, MAX_SIZES_CNT, available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, count * 2);

    /* EV compensation step as a rational (numerator/denominator). */
    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);
5699
    /* Video stabilization: OFF is always available; EIS (ON) is added only
     * for the back camera and only when persist.camera.eis.enable is set. */
    Vector<uint8_t> availableVstabModes;
    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
    char eis_prop[PROPERTY_VALUE_MAX];
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "0");
    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
    if (facingBack && eis_prop_set) {
        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes.array(), availableVstabModes.size());

    /*HAL 1 and HAL 3 common*/
    /* Fixed 4x max digital zoom, shared with the HAL1 path. */
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

    /* Max metering regions: 1 AE, 0 AWB, 1 AF; AF regions are dropped when
     * only a single (fixed) focus mode is supported. */
    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
        max3aRegions[2] = 0; /* AF not supported */
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            max3aRegions, 3);

    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "1");
    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
    CDBG("%s: Support face detection mode: %d",
            __func__, supportedFaceDetectMode);

    /* OFF is always advertised; any property value outside 1..3 disables
     * face detection entirely (maxFaces forced to 0). */
    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
    Vector<uint8_t> availableFaceDetectModes;
    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
    if (supportedFaceDetectMode == 1) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
    } else if (supportedFaceDetectMode == 2) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else if (supportedFaceDetectMode == 3) {
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
    } else {
        maxFaces = 0;
    }
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            availableFaceDetectModes.array(),
            availableFaceDetectModes.size());
    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            (int32_t *)&maxFaces, 1);

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                           gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5765
    /*all sizes will be clubbed into this tag*/
    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /* Filter the processed-size pairs down to JPEG-capable sizes, bounded
     * by the active array and the maximum downscale factor. Returns the
     * number of int32 entries written (i.e. 2 * number of sizes). */
    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
            gCamCapability[cameraId]->max_downscale_factor);
    /*android.scaler.availableStreamConfigurations*/
    /* Upper bound on (format, width, height, direction) tuples; also reused
     * below to size the min-frame-durations array. */
    size_t max_stream_configs_size = count * scalar_formats_count * 4;
    Vector<int32_t> available_stream_configs;
    /* NOTE(review): active_array_dim is initialized here but not referenced
     * in the code visible in this section -- confirm it is still needed. */
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            /* RAW formats: one output config per supported raw dimension. */
            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            /* JPEG: one output config per filtered (width, height) pair. */
            cam_dimension_t jpeg_size;
            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
                jpeg_size.width  = available_jpeg_sizes[i*2];
                jpeg_size.height = available_jpeg_sizes[i*2+1];
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        jpeg_size,
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* Book keep largest */
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                 addStreamConfig(available_stream_configs, scalar_formats[j],
                         largest_picture_size,
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }

    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    /* Default hot-pixel handling: correction FAST, map reporting OFF. */
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5834
    /* android.scaler.availableMinFrameDurations */
    /* Entries are (format, width, height, min_duration) quadruples.
     * NOTE(review): the VLA is sized by max_stream_configs_size, which was
     * computed from the (clamped) picture-size count; the loops below index
     * by the unclamped supported_raw_dim_cnt / picture_sizes_tbl_cnt, so
     * this assumes both counts fit within that bound -- confirm. */
    int64_t available_min_durations[max_stream_configs_size];
    size_t idx = 0;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            /* RAW formats use the raw dimension/duration tables. */
            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->raw_dim[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->raw_dim[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->raw_min_duration[i];
                idx+=4;
            }
            break;
        default:
            /* All other formats use the picture-size/duration tables. */
            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->picture_min_duration[i];
                idx+=4;
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      &available_min_durations[0], idx);
5870
    /* Build CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS from the
     * HAL's HFR table. */
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        /* Map the HFR mode enum to its numeric frame rate; OFF/MAX and
         * unknown modes leave fps at 0 and are filtered out below. */
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
             * [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
       }
    }
    //Advertise HFR capability only if the property is set
    /* hfrEnable is also consulted later when building the capabilities
     * list (CONSTRAINED_HIGH_SPEED_VIDEO). */
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }

    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);
5947
    /* Translate supported HAL effect modes to framework enums; modes with
     * no framework mapping are silently skipped. */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    /* Scene modes: collect framework-visible modes (excluding OFF) and
     * remember each one's index into the HAL table so the overrides list
     * below can be built from the matching HAL entries. */
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    /* One 3-entry override tuple per advertised scene mode. */
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    /* NOTE(review): when no scene modes were found, DISABLED is advertised
     * after makeOverridesList() already ran with a count of 0, so the
     * single override tuple published below comes from whatever the array
     * held -- confirm this is intentional. */
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);
6008
    /* Antibanding modes, mapped HAL -> framework; unmapped entries are
     * skipped. */
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    /* Chromatic aberration correction modes; if the HAL reports none,
     * advertise OFF only. */
    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
    size = 0;
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        avail_abberation_modes[0] =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        size++;
    } else {
        for (size_t i = 0; i < count; i++) {
            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                    gCamCapability[cameraId]->aberration_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_abberation_modes[size] = (uint8_t)val;
                size++;
            } else {
                /* NOTE(review): an unmapped mode aborts the scan with
                 * 'break', so valid modes listed after it are never
                 * advertised -- confirm 'continue' was not intended. */
                ALOGE("%s: Invalid CAC mode %d", __func__,
                        gCamCapability[cameraId]->aberration_modes[i]);
                break;
            }
        }

    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);
6052
    /* AF modes, mapped HAL -> framework; unmapped entries are skipped. */
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    /* AWB modes, mapped HAL -> framework; unmapped entries are skipped. */
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    /* Flash firing levels are copied through without framework mapping. */
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    /* flashAvailable is also consulted below when extending the AE mode
     * list with flash-assisted modes. */
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);
6104
    /* AE modes straight from the HAL, plus the flash-assisted modes when a
     * flash unit is present. */
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    /* NOTE(review): the framework documents this tag's order as
     * (raw, processed non-stalling, processed stalling); the array here is
     * (stalling, processed, raw) -- confirm the intended ordering. */
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    /* No notification LEDs: publish the tag with a zero-length entry. */
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);
6146
    /* Focus distance calibration: only published when the HAL value maps to
     * a framework enum. 'val' is reused by later lookups below. */
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    /* Sensor test pattern modes, mapped HAL -> framework. */
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    /* Worst-case number of requests in flight through the pipeline. */
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6184
    /* Device capabilities. CONSTRAINED_HIGH_SPEED_VIDEO is added only when
     * HFR configs were published above, and RAW is omitted for YUV-only
     * sensors. */
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
    //BURST_CAPTURE.
    /* In practice this is keyed off the sensor type: RAW sensors (which get
     * the manual capabilities above) advertise AE lock. */
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has
    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    /* A single reprocess input stream is supported. */
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);
6225
    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    /* IMPLEMENTATION_DEFINED and YCbCr_420_888 inputs can each be
     * reprocessed into BLOB (JPEG) or YCbCr_420_888 outputs. */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    /* LIMITED devices report the HAL's sync latency; otherwise per-frame
     * control is advertised. */
    int32_t max_latency = (limitedDevice) ?
            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);
6258
    /* Fixed lists of supported edge, noise-reduction, tonemap and
     * hot-pixel-map modes. */
    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));

    /* Reference illuminants: published only when the HAL value maps to a
     * framework enum. */
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    /* Color calibration matrices from the capability table, cast through
     * void* to the rational layout the framework expects. */
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6325
6326    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6327       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6328       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6329       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6330       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6331       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6332       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6333       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6334       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6335       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6336       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6337       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6338       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6339       ANDROID_JPEG_GPS_COORDINATES,
6340       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6341       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6342       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6343       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6344       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6345       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6346       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6347       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6348       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6349       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6350       ANDROID_STATISTICS_FACE_DETECT_MODE,
6351       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6352       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6353       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6354       ANDROID_BLACK_LEVEL_LOCK };
6355
6356    size_t request_keys_cnt =
6357            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6358    Vector<int32_t> available_request_keys;
6359    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6360    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6361        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6362    }
6363
6364    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6365            available_request_keys.array(), available_request_keys.size());
6366
6367    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6368       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6369       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6370       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6371       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6372       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6373       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6374       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6375       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6376       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6377       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6378       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6379       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6380       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6381       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6382       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6383       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6384       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6385       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6386       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6387       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6388       ANDROID_STATISTICS_FACE_SCORES};
6389    size_t result_keys_cnt =
6390            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6391
6392    Vector<int32_t> available_result_keys;
6393    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6394    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6395        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6396    }
6397    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6398       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6399       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6400    }
6401    if (supportedFaceDetectMode == 1) {
6402        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6403        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6404    } else if ((supportedFaceDetectMode == 2) ||
6405            (supportedFaceDetectMode == 3)) {
6406        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6407        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6408    }
6409    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6410            available_result_keys.array(), available_result_keys.size());
6411
6412    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6413       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6414       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6415       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6416       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6417       ANDROID_SCALER_CROPPING_TYPE,
6418       ANDROID_SYNC_MAX_LATENCY,
6419       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6420       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6421       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6422       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6423       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6424       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6425       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6426       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6427       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6428       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6429       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6430       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6431       ANDROID_LENS_FACING,
6432       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6433       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6434       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6435       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6436       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6437       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6438       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6439       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6440       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6441       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6442       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6443       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6444       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6445       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6446       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6447       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6448       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6449       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6450       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6451       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6452       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6453       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6454       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6455       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6456       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6457       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6458       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6459       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6460       ANDROID_TONEMAP_MAX_CURVE_POINTS,
6461       ANDROID_CONTROL_AVAILABLE_MODES,
6462       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6463       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6464       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6465       ANDROID_SHADING_AVAILABLE_MODES,
6466       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6467    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6468                      available_characteristics_keys,
6469                      sizeof(available_characteristics_keys)/sizeof(int32_t));
6470
6471    /*available stall durations depend on the hw + sw and will be different for different devices */
6472    /*have to add for raw after implementation*/
6473    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6474    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6475
6476    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6477    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6478            MAX_SIZES_CNT);
6479    size_t available_stall_size = count * 4;
6480    int64_t available_stall_durations[available_stall_size];
6481    idx = 0;
6482    for (uint32_t j = 0; j < stall_formats_count; j++) {
6483       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6484          for (uint32_t i = 0; i < count; i++) {
6485             available_stall_durations[idx]   = stall_formats[j];
6486             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6487             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6488             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6489             idx+=4;
6490          }
6491       } else {
6492          for (uint32_t i = 0; i < raw_count; i++) {
6493             available_stall_durations[idx]   = stall_formats[j];
6494             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6495             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6496             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6497             idx+=4;
6498          }
6499       }
6500    }
6501    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6502                      available_stall_durations,
6503                      idx);
6504    //QCAMERA3_OPAQUE_RAW
6505    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6506    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6507    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6508    case LEGACY_RAW:
6509        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6510            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6511        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6512            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6513        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6514            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6515        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6516        break;
6517    case MIPI_RAW:
6518        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6519            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6520        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6521            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6522        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6523            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6524        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6525        break;
6526    default:
6527        ALOGE("%s: unknown opaque_raw_format %d", __func__,
6528                gCamCapability[cameraId]->opaque_raw_fmt);
6529        break;
6530    }
6531    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6532
6533    int32_t strides[3*raw_count];
6534    for (size_t i = 0; i < raw_count; i++) {
6535        cam_stream_buf_plane_info_t buf_planes;
6536        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6537        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6538        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6539            &gCamCapability[cameraId]->padding_info, &buf_planes);
6540        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6541    }
6542    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6543            3*raw_count);
6544
6545    gStaticMetadata[cameraId] = staticInfo.release();
6546    return rc;
6547}
6548
6549/*===========================================================================
6550 * FUNCTION   : makeTable
6551 *
6552 * DESCRIPTION: make a table of sizes
6553 *
6554 * PARAMETERS :
6555 *
6556 *
6557 *==========================================================================*/
6558void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6559        size_t max_size, int32_t *sizeTable)
6560{
6561    size_t j = 0;
6562    if (size > max_size) {
6563       size = max_size;
6564    }
6565    for (size_t i = 0; i < size; i++) {
6566        sizeTable[j] = dimTable[i].width;
6567        sizeTable[j+1] = dimTable[i].height;
6568        j+=2;
6569    }
6570}
6571
6572/*===========================================================================
6573 * FUNCTION   : makeFPSTable
6574 *
6575 * DESCRIPTION: make a table of fps ranges
6576 *
6577 * PARAMETERS :
6578 *
6579 *==========================================================================*/
6580void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6581        size_t max_size, int32_t *fpsRangesTable)
6582{
6583    size_t j = 0;
6584    if (size > max_size) {
6585       size = max_size;
6586    }
6587    for (size_t i = 0; i < size; i++) {
6588        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6589        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6590        j+=2;
6591    }
6592}
6593
6594/*===========================================================================
6595 * FUNCTION   : makeOverridesList
6596 *
6597 * DESCRIPTION: make a list of scene mode overrides
6598 *
6599 * PARAMETERS :
6600 *
6601 *
6602 *==========================================================================*/
6603void QCamera3HardwareInterface::makeOverridesList(
6604        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6605        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6606{
6607    /*daemon will give a list of overrides for all scene modes.
6608      However we should send the fwk only the overrides for the scene modes
6609      supported by the framework*/
6610    size_t j = 0;
6611    if (size > max_size) {
6612       size = max_size;
6613    }
6614    size_t focus_count = CAM_FOCUS_MODE_MAX;
6615    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6616            focus_count);
6617    for (size_t i = 0; i < size; i++) {
6618        bool supt = false;
6619        size_t index = supported_indexes[i];
6620        overridesList[j] = gCamCapability[camera_id]->flash_available ?
6621                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6622        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6623                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6624                overridesTable[index].awb_mode);
6625        if (NAME_NOT_FOUND != val) {
6626            overridesList[j+1] = (uint8_t)val;
6627        }
6628        uint8_t focus_override = overridesTable[index].af_mode;
6629        for (size_t k = 0; k < focus_count; k++) {
6630           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6631              supt = true;
6632              break;
6633           }
6634        }
6635        if (supt) {
6636            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6637                    focus_override);
6638            if (NAME_NOT_FOUND != val) {
6639                overridesList[j+2] = (uint8_t)val;
6640            }
6641        } else {
6642           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6643        }
6644        j+=3;
6645    }
6646}
6647
6648/*===========================================================================
6649 * FUNCTION   : filterJpegSizes
6650 *
6651 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6652 *              could be downscaled to
6653 *
6654 * PARAMETERS :
6655 *
6656 * RETURN     : length of jpegSizes array
6657 *==========================================================================*/
6658
6659size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6660        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6661        uint8_t downscale_factor)
6662{
6663    if (0 == downscale_factor) {
6664        downscale_factor = 1;
6665    }
6666
6667    int32_t min_width = active_array_size.width / downscale_factor;
6668    int32_t min_height = active_array_size.height / downscale_factor;
6669    size_t jpegSizesCnt = 0;
6670    if (processedSizesCnt > maxCount) {
6671        processedSizesCnt = maxCount;
6672    }
6673    for (size_t i = 0; i < processedSizesCnt; i+=2) {
6674        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6675            jpegSizes[jpegSizesCnt] = processedSizes[i];
6676            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6677            jpegSizesCnt += 2;
6678        }
6679    }
6680    return jpegSizesCnt;
6681}
6682
6683/*===========================================================================
 * FUNCTION   : getScalarFormat
6685 *
6686 * DESCRIPTION: convert the format to type recognized by framework
6687 *
6688 * PARAMETERS : format : the format from backend
6689 *
6690 ** RETURN    : format recognized by framework
6691 *
6692 *==========================================================================*/
6693int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6694{
6695    int32_t halPixelFormat;
6696
6697    switch (format) {
6698    case CAM_FORMAT_YUV_420_NV12:
6699        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6700        break;
6701    case CAM_FORMAT_YUV_420_NV21:
6702        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6703        break;
6704    case CAM_FORMAT_YUV_420_NV21_ADRENO:
6705        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6706        break;
6707    case CAM_FORMAT_YUV_420_YV12:
6708        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6709        break;
6710    case CAM_FORMAT_YUV_422_NV16:
6711    case CAM_FORMAT_YUV_422_NV61:
6712    default:
6713        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6714        break;
6715    }
6716    return halPixelFormat;
6717}
6718
6719/*===========================================================================
6720 * FUNCTION   : computeNoiseModelEntryS
6721 *
6722 * DESCRIPTION: function to map a given sensitivity to the S noise
6723 *              model parameters in the DNG noise model.
6724 *
6725 * PARAMETERS : sens : the sensor sensitivity
6726 *
6727 ** RETURN    : S (sensor amplification) noise
6728 *
6729 *==========================================================================*/
6730double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6731    double s = gCamCapability[mCameraId]->gradient_S * sens +
6732            gCamCapability[mCameraId]->offset_S;
6733    return ((s < 0.0) ? 0.0 : s);
6734}
6735
6736/*===========================================================================
6737 * FUNCTION   : computeNoiseModelEntryO
6738 *
6739 * DESCRIPTION: function to map a given sensitivity to the O noise
6740 *              model parameters in the DNG noise model.
6741 *
6742 * PARAMETERS : sens : the sensor sensitivity
6743 *
6744 ** RETURN    : O (sensor readout) noise
6745 *
6746 *==========================================================================*/
6747double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6748    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
6749    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
6750            1.0 : (1.0 * sens / max_analog_sens);
6751    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
6752            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
6753    return ((o < 0.0) ? 0.0 : o);
6754}
6755
6756/*===========================================================================
6757 * FUNCTION   : getSensorSensitivity
6758 *
6759 * DESCRIPTION: convert iso_mode to an integer value
6760 *
6761 * PARAMETERS : iso_mode : the iso_mode supported by sensor
6762 *
6763 ** RETURN    : sensitivity supported by sensor
6764 *
6765 *==========================================================================*/
6766int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6767{
6768    int32_t sensitivity;
6769
6770    switch (iso_mode) {
6771    case CAM_ISO_MODE_100:
6772        sensitivity = 100;
6773        break;
6774    case CAM_ISO_MODE_200:
6775        sensitivity = 200;
6776        break;
6777    case CAM_ISO_MODE_400:
6778        sensitivity = 400;
6779        break;
6780    case CAM_ISO_MODE_800:
6781        sensitivity = 800;
6782        break;
6783    case CAM_ISO_MODE_1600:
6784        sensitivity = 1600;
6785        break;
6786    default:
6787        sensitivity = -1;
6788        break;
6789    }
6790    return sensitivity;
6791}
6792
6793/*===========================================================================
6794 * FUNCTION   : getCamInfo
6795 *
6796 * DESCRIPTION: query camera capabilities
6797 *
6798 * PARAMETERS :
6799 *   @cameraId  : camera Id
6800 *   @info      : camera info struct to be filled in with camera capabilities
6801 *
6802 * RETURN     : int type of status
6803 *              NO_ERROR  -- success
 *              non-zero failure code
6805 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamCapability / gStaticMetadata are lazily-initialized, process-wide
    // caches; gCamLock serializes first-time initialization between callers.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Drop the lock on every early-exit path.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the sensor mount position into the framework facing enum.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: record the failure in rc, but still fill in the
        // remaining fields below (matches existing behavior).
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    // The cached static metadata stays owned by the HAL; the framework only
    // borrows this pointer.
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // First find the highest max_fps advertised across all fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
6873
6874/*===========================================================================
6875 * FUNCTION   : translateCapabilityToMetadata
6876 *
6877 * DESCRIPTION: translate the capability into camera_metadata_t
6878 *
6879 * PARAMETERS : type of the request
6880 *
6881 *
6882 * RETURN     : success: camera_metadata_t*
6883 *              failure: NULL
6884 *
6885 *==========================================================================*/
6886camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6887{
6888    if (mDefaultMetadata[type] != NULL) {
6889        return mDefaultMetadata[type];
6890    }
6891    //first time we are handling this request
6892    //fill up the metadata structure using the wrapper class
6893    CameraMetadata settings;
6894    //translate from cam_capability_t to camera_metadata_tag_t
6895    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6896    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6897    int32_t defaultRequestID = 0;
6898    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6899
6900    /* OIS disable */
6901    char ois_prop[PROPERTY_VALUE_MAX];
6902    memset(ois_prop, 0, sizeof(ois_prop));
6903    property_get("persist.camera.ois.disable", ois_prop, "0");
6904    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6905
6906    /* Force video to use OIS */
6907    char videoOisProp[PROPERTY_VALUE_MAX];
6908    memset(videoOisProp, 0, sizeof(videoOisProp));
6909    property_get("persist.camera.ois.video", videoOisProp, "1");
6910    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6911
6912    // EIS enable/disable
6913    char eis_prop[PROPERTY_VALUE_MAX];
6914    memset(eis_prop, 0, sizeof(eis_prop));
6915    property_get("persist.camera.eis.enable", eis_prop, "0");
6916    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6917
6918    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
6919    // This is a bit hacky. EIS is enabled only when the above setprop
6920    // is set to non-zero value and on back camera (for 2015 Nexus).
6921    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
6922    // configureStream is called before this function. In other words,
6923    // we cannot guarantee the app will call configureStream before
6924    // calling createDefaultRequest.
6925    const bool eisEnabled = facingBack && eis_prop_set;
6926
6927    uint8_t controlIntent = 0;
6928    uint8_t focusMode;
6929    uint8_t vsMode;
6930    uint8_t optStabMode;
6931    uint8_t cacMode;
6932    uint8_t edge_mode;
6933    uint8_t noise_red_mode;
6934    uint8_t tonemap_mode;
6935    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6936    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6937    switch (type) {
6938      case CAMERA3_TEMPLATE_PREVIEW:
6939        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
6940        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6941        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6942        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6943        edge_mode = ANDROID_EDGE_MODE_FAST;
6944        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6945        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6946        break;
6947      case CAMERA3_TEMPLATE_STILL_CAPTURE:
6948        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
6949        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6950        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6951        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
6952        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
6953        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
6954        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
6955        break;
6956      case CAMERA3_TEMPLATE_VIDEO_RECORD:
6957        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
6958        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6959        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6960        if (eisEnabled) {
6961            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
6962        }
6963        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6964        edge_mode = ANDROID_EDGE_MODE_FAST;
6965        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6966        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6967        if (forceVideoOis)
6968            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6969        break;
6970      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
6971        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
6972        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6973        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6974        if (eisEnabled) {
6975            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
6976        }
6977        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6978        edge_mode = ANDROID_EDGE_MODE_FAST;
6979        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6980        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6981        if (forceVideoOis)
6982            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6983        break;
6984      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
6985        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
6986        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6987        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6988        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6989        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
6990        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
6991        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6992        break;
6993      case CAMERA3_TEMPLATE_MANUAL:
6994        edge_mode = ANDROID_EDGE_MODE_FAST;
6995        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6996        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6997        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
6998        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
6999        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7000        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7001        break;
7002      default:
7003        edge_mode = ANDROID_EDGE_MODE_FAST;
7004        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7005        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7006        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7007        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7008        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7009        break;
7010    }
7011    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7012    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7013    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7014    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7015        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7016    }
7017    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7018
7019    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7020            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7021        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7022    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7023            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7024            || ois_disable)
7025        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7026    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7027
7028    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7029            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7030
7031    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7032    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7033
7034    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7035    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7036
7037    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7038    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7039
7040    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7041    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7042
7043    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7044    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7045
7046    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7047    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7048
7049    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7050    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7051
7052    /*flash*/
7053    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7054    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7055
7056    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7057    settings.update(ANDROID_FLASH_FIRING_POWER,
7058            &flashFiringLevel, 1);
7059
7060    /* lens */
7061    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7062    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7063
7064    if (gCamCapability[mCameraId]->filter_densities_count) {
7065        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7066        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7067                        gCamCapability[mCameraId]->filter_densities_count);
7068    }
7069
7070    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7071    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7072
7073    float default_focus_distance = 0;
7074    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7075
7076    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7077    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7078
7079    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7080    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7081
7082    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7083    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7084
7085    /* face detection (default to OFF) */
7086    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7087    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7088
7089    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7090    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7091
7092    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7093    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7094
7095    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7096    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7097
7098    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7099    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7100
7101    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7102    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7103
7104    /* Exposure time(Update the Min Exposure Time)*/
7105    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7106    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7107
7108    /* frame duration */
7109    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7110    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7111
7112    /* sensitivity */
7113    static const int32_t default_sensitivity = 100;
7114    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7115
7116    /*edge mode*/
7117    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7118
7119    /*noise reduction mode*/
7120    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7121
7122    /*color correction mode*/
7123    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7124    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7125
7126    /*transform matrix mode*/
7127    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7128
7129    int32_t scaler_crop_region[4];
7130    scaler_crop_region[0] = 0;
7131    scaler_crop_region[1] = 0;
7132    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7133    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7134    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7135
7136    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7137    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7138
7139    /*focus distance*/
7140    float focus_distance = 0.0;
7141    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7142
7143    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7144    float max_range = 0.0;
7145    float max_fixed_fps = 0.0;
7146    int32_t fps_range[2] = {0, 0};
7147    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7148            i++) {
7149        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7150            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7151        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7152                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7153                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7154            if (range > max_range) {
7155                fps_range[0] =
7156                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7157                fps_range[1] =
7158                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7159                max_range = range;
7160            }
7161        } else {
7162            if (range < 0.01 && max_fixed_fps <
7163                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7164                fps_range[0] =
7165                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7166                fps_range[1] =
7167                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7168                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7169            }
7170        }
7171    }
7172    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7173
7174    /*precapture trigger*/
7175    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7176    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7177
7178    /*af trigger*/
7179    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7180    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7181
7182    /* ae & af regions */
7183    int32_t active_region[] = {
7184            gCamCapability[mCameraId]->active_array_size.left,
7185            gCamCapability[mCameraId]->active_array_size.top,
7186            gCamCapability[mCameraId]->active_array_size.left +
7187                    gCamCapability[mCameraId]->active_array_size.width,
7188            gCamCapability[mCameraId]->active_array_size.top +
7189                    gCamCapability[mCameraId]->active_array_size.height,
7190            0};
7191    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7192            sizeof(active_region) / sizeof(active_region[0]));
7193    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7194            sizeof(active_region) / sizeof(active_region[0]));
7195
7196    /* black level lock */
7197    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7198    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7199
7200    /* lens shading map mode */
7201    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7202    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7203        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7204    }
7205    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7206
7207    //special defaults for manual template
7208    if (type == CAMERA3_TEMPLATE_MANUAL) {
7209        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7210        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7211
7212        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7213        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7214
7215        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7216        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7217
7218        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7219        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7220
7221        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7222        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7223
7224        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7225        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7226    }
7227
7228
7229    /* TNR
7230     * We'll use this location to determine which modes TNR will be set.
7231     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7232     * This is not to be confused with linking on a per stream basis that decision
7233     * is still on per-session basis and will be handled as part of config stream
7234     */
7235    uint8_t tnr_enable = 0;
7236
7237    if (m_bTnrPreview || m_bTnrVideo) {
7238
7239        switch (type) {
7240            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7241            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7242                    tnr_enable = 1;
7243                    break;
7244
7245            default:
7246                    tnr_enable = 0;
7247                    break;
7248        }
7249
7250        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7251        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7252        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7253
7254        CDBG("%s: TNR:%d with process plate %d for template:%d",
7255                            __func__, tnr_enable, tnr_process_type, type);
7256    }
7257
7258    /* CDS default */
7259    char prop[PROPERTY_VALUE_MAX];
7260    memset(prop, 0, sizeof(prop));
7261    property_get("persist.camera.CDS", prop, "Auto");
7262    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7263    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7264    if (CAM_CDS_MODE_MAX == cds_mode) {
7265        cds_mode = CAM_CDS_MODE_AUTO;
7266    }
7267    m_CdsPreference = cds_mode;
7268
7269    /* Disabling CDS in templates which have TNR enabled*/
7270    if (tnr_enable)
7271        cds_mode = CAM_CDS_MODE_OFF;
7272
7273    int32_t mode = cds_mode;
7274    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7275    mDefaultMetadata[type] = settings.release();
7276
7277    return mDefaultMetadata[type];
7278}
7279
7280/*===========================================================================
7281 * FUNCTION   : setFrameParameters
7282 *
7283 * DESCRIPTION: set parameters per frame as requested in the metadata from
7284 *              framework
7285 *
7286 * PARAMETERS :
7287 *   @request   : request that needs to be serviced
7288 *   @streamID : Stream ID of all the requested streams
7289 *   @blob_request: Whether this request is a blob request or not
7290 *
7291 * RETURN     : success: NO_ERROR
7292 *              failure:
7293 *==========================================================================*/
7294int QCamera3HardwareInterface::setFrameParameters(
7295                    camera3_capture_request_t *request,
7296                    cam_stream_ID_t streamID,
7297                    int blob_request,
7298                    uint32_t snapshotStreamId)
7299{
7300    /*translate from camera_metadata_t type to parm_type_t*/
7301    int rc = 0;
7302    int32_t hal_version = CAM_HAL_V3;
7303
7304    clear_metadata_buffer(mParameters);
7305    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7306        ALOGE("%s: Failed to set hal version in the parameters", __func__);
7307        return BAD_VALUE;
7308    }
7309
7310    /*we need to update the frame number in the parameters*/
7311    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7312            request->frame_number)) {
7313        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7314        return BAD_VALUE;
7315    }
7316
7317    /* Update stream id of all the requested buffers */
7318    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7319        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7320        return BAD_VALUE;
7321    }
7322
7323    if (mUpdateDebugLevel) {
7324        uint32_t dummyDebugLevel = 0;
7325        /* The value of dummyDebugLevel is irrelavent. On
7326         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7327        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7328                dummyDebugLevel)) {
7329            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7330            return BAD_VALUE;
7331        }
7332        mUpdateDebugLevel = false;
7333    }
7334
7335    if(request->settings != NULL){
7336        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7337        if (blob_request)
7338            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7339    }
7340
7341    return rc;
7342}
7343
7344/*===========================================================================
7345 * FUNCTION   : setReprocParameters
7346 *
7347 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7348 *              return it.
7349 *
7350 * PARAMETERS :
7351 *   @request   : request that needs to be serviced
7352 *
7353 * RETURN     : success: NO_ERROR
7354 *              failure:
7355 *==========================================================================*/
7356int32_t QCamera3HardwareInterface::setReprocParameters(
7357        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7358        uint32_t snapshotStreamId)
7359{
7360    /*translate from camera_metadata_t type to parm_type_t*/
7361    int rc = 0;
7362
7363    if (NULL == request->settings){
7364        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7365        return BAD_VALUE;
7366    }
7367
7368    if (NULL == reprocParam) {
7369        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7370        return BAD_VALUE;
7371    }
7372    clear_metadata_buffer(reprocParam);
7373
7374    /*we need to update the frame number in the parameters*/
7375    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7376            request->frame_number)) {
7377        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7378        return BAD_VALUE;
7379    }
7380
7381    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7382    if (rc < 0) {
7383        ALOGE("%s: Failed to translate reproc request", __func__);
7384        return rc;
7385    }
7386
7387    CameraMetadata frame_settings;
7388    frame_settings = request->settings;
7389    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7390            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7391        int32_t *crop_count =
7392                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7393        int32_t *crop_data =
7394                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7395        int32_t *roi_map =
7396                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7397        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7398            cam_crop_data_t crop_meta;
7399            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7400            crop_meta.num_of_streams = 1;
7401            crop_meta.crop_info[0].crop.left   = crop_data[0];
7402            crop_meta.crop_info[0].crop.top    = crop_data[1];
7403            crop_meta.crop_info[0].crop.width  = crop_data[2];
7404            crop_meta.crop_info[0].crop.height = crop_data[3];
7405
7406            crop_meta.crop_info[0].roi_map.left =
7407                    roi_map[0];
7408            crop_meta.crop_info[0].roi_map.top =
7409                    roi_map[1];
7410            crop_meta.crop_info[0].roi_map.width =
7411                    roi_map[2];
7412            crop_meta.crop_info[0].roi_map.height =
7413                    roi_map[3];
7414
7415            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7416                rc = BAD_VALUE;
7417            }
7418            CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7419                    __func__,
7420                    request->input_buffer->stream,
7421                    crop_meta.crop_info[0].crop.left,
7422                    crop_meta.crop_info[0].crop.top,
7423                    crop_meta.crop_info[0].crop.width,
7424                    crop_meta.crop_info[0].crop.height);
7425            CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7426                    __func__,
7427                    request->input_buffer->stream,
7428                    crop_meta.crop_info[0].roi_map.left,
7429                    crop_meta.crop_info[0].roi_map.top,
7430                    crop_meta.crop_info[0].roi_map.width,
7431                    crop_meta.crop_info[0].roi_map.height);
7432            } else {
7433                ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7434            }
7435    } else {
7436        ALOGE("%s: No crop data from matching output stream", __func__);
7437    }
7438
7439    /* These settings are not needed for regular requests so handle them specially for
7440       reprocess requests; information needed for EXIF tags */
7441    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7442        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7443                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7444        if (NAME_NOT_FOUND != val) {
7445            uint32_t flashMode = (uint32_t)val;
7446            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7447                rc = BAD_VALUE;
7448            }
7449        } else {
7450            ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7451                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7452        }
7453    } else {
7454        CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7455    }
7456
7457    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7458        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7459        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7460            rc = BAD_VALUE;
7461        }
7462    } else {
7463        CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7464    }
7465
7466    return rc;
7467}
7468
7469/*===========================================================================
7470 * FUNCTION   : saveRequestSettings
7471 *
7472 * DESCRIPTION: Add any settings that might have changed to the request settings
7473 *              and save the settings to be applied on the frame
7474 *
7475 * PARAMETERS :
7476 *   @jpegMetadata : the extracted and/or modified jpeg metadata
7477 *   @request      : request with initial settings
7478 *
7479 * RETURN     :
7480 * camera_metadata_t* : pointer to the saved request settings
7481 *==========================================================================*/
7482camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7483        const CameraMetadata &jpegMetadata,
7484        camera3_capture_request_t *request)
7485{
7486    camera_metadata_t *resultMetadata;
7487    CameraMetadata camMetadata;
7488    camMetadata = request->settings;
7489
7490    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7491        int32_t thumbnail_size[2];
7492        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7493        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7494        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7495                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7496    }
7497
7498    resultMetadata = camMetadata.release();
7499    return resultMetadata;
7500}
7501
7502/*===========================================================================
7503 * FUNCTION   : setHalFpsRange
7504 *
7505 * DESCRIPTION: set FPS range parameter
7506 *
7507 *
7508 * PARAMETERS :
7509 *   @settings    : Metadata from framework
7510 *   @hal_metadata: Metadata buffer
7511 *
7512 *
7513 * RETURN     : success: NO_ERROR
7514 *              failure:
7515 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): find() is called without an exists() check — this assumes
    // every caller has already verified ANDROID_CONTROL_AE_TARGET_FPS_RANGE
    // is present in 'settings'; confirm at the call sites.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default video fps tracks the AE target range; overridden below for
    // constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batching is re-derived on every call; default to "no batching" and
    // only enable it inside the constrained high-speed branch below.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode, lock both the session min fps and
        // the video min fps to the requested max fps (fixed-rate sensor).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size scales with the HFR rate, capped at the
                // hardware's maximum batch size.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly adjusted) fps range itself into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7609
7610/*===========================================================================
7611 * FUNCTION   : translateToHalMetadata
7612 *
7613 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7614 *
7615 *
7616 * PARAMETERS :
7617 *   @request  : request sent from framework
7618 *
7619 *
7620 * RETURN     : success: NO_ERROR
7621 *              failure:
7622 *==========================================================================*/
7623int QCamera3HardwareInterface::translateToHalMetadata
7624                                  (const camera3_capture_request_t *request,
7625                                   metadata_buffer_t *hal_metadata,
7626                                   uint32_t snapshotStreamId)
7627{
7628    int rc = 0;
7629    CameraMetadata frame_settings;
7630    frame_settings = request->settings;
7631
7632    /* Do not change the order of the following list unless you know what you are
7633     * doing.
7634     * The order is laid out in such a way that parameters in the front of the table
7635     * may be used to override the parameters later in the table. Examples are:
7636     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7638     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
7640     */
7641    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7642        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7643        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7644            rc = BAD_VALUE;
7645        }
7646        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7647        if (rc != NO_ERROR) {
7648            ALOGE("%s: extractSceneMode failed", __func__);
7649        }
7650    }
7651
7652    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7653        uint8_t fwk_aeMode =
7654            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7655        uint8_t aeMode;
7656        int32_t redeye;
7657
7658        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
7659            aeMode = CAM_AE_MODE_OFF;
7660        } else {
7661            aeMode = CAM_AE_MODE_ON;
7662        }
7663        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
7664            redeye = 1;
7665        } else {
7666            redeye = 0;
7667        }
7668
7669        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7670                fwk_aeMode);
7671        if (NAME_NOT_FOUND != val) {
7672            int32_t flashMode = (int32_t)val;
7673            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
7674        }
7675
7676        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
7677        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
7678            rc = BAD_VALUE;
7679        }
7680    }
7681
7682    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
7683        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
7684        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7685                fwk_whiteLevel);
7686        if (NAME_NOT_FOUND != val) {
7687            uint8_t whiteLevel = (uint8_t)val;
7688            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
7689                rc = BAD_VALUE;
7690            }
7691        }
7692    }
7693
7694    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
7695        uint8_t fwk_cacMode =
7696                frame_settings.find(
7697                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
7698        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7699                fwk_cacMode);
7700        if (NAME_NOT_FOUND != val) {
7701            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
7702            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
7703                rc = BAD_VALUE;
7704            }
7705        } else {
7706            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
7707        }
7708    }
7709
7710    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
7711        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
7712        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7713                fwk_focusMode);
7714        if (NAME_NOT_FOUND != val) {
7715            uint8_t focusMode = (uint8_t)val;
7716            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
7717                rc = BAD_VALUE;
7718            }
7719        }
7720    }
7721
7722    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
7723        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
7724        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
7725                focalDistance)) {
7726            rc = BAD_VALUE;
7727        }
7728    }
7729
7730    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
7731        uint8_t fwk_antibandingMode =
7732                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
7733        int val = lookupHalName(ANTIBANDING_MODES_MAP,
7734                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
7735        if (NAME_NOT_FOUND != val) {
7736            uint32_t hal_antibandingMode = (uint32_t)val;
7737            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
7738                    hal_antibandingMode)) {
7739                rc = BAD_VALUE;
7740            }
7741        }
7742    }
7743
7744    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
7745        int32_t expCompensation = frame_settings.find(
7746                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
7747        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
7748            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
7749        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7750            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7751        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7752                expCompensation)) {
7753            rc = BAD_VALUE;
7754        }
7755    }
7756
7757    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7758        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7759        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7760            rc = BAD_VALUE;
7761        }
7762    }
7763    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7764        rc = setHalFpsRange(frame_settings, hal_metadata);
7765        if (rc != NO_ERROR) {
7766            ALOGE("%s: setHalFpsRange failed", __func__);
7767        }
7768    }
7769
7770    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7771        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7772        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7773            rc = BAD_VALUE;
7774        }
7775    }
7776
7777    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7778        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7779        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7780                fwk_effectMode);
7781        if (NAME_NOT_FOUND != val) {
7782            uint8_t effectMode = (uint8_t)val;
7783            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
7784                rc = BAD_VALUE;
7785            }
7786        }
7787    }
7788
7789    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7790        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7791        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
7792                colorCorrectMode)) {
7793            rc = BAD_VALUE;
7794        }
7795    }
7796
7797    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7798        cam_color_correct_gains_t colorCorrectGains;
7799        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7800            colorCorrectGains.gains[i] =
7801                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7802        }
7803        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
7804                colorCorrectGains)) {
7805            rc = BAD_VALUE;
7806        }
7807    }
7808
7809    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7810        cam_color_correct_matrix_t colorCorrectTransform;
7811        cam_rational_type_t transform_elem;
7812        size_t num = 0;
7813        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7814           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7815              transform_elem.numerator =
7816                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7817              transform_elem.denominator =
7818                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7819              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7820              num++;
7821           }
7822        }
7823        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7824                colorCorrectTransform)) {
7825            rc = BAD_VALUE;
7826        }
7827    }
7828
7829    cam_trigger_t aecTrigger;
7830    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7831    aecTrigger.trigger_id = -1;
7832    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7833        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7834        aecTrigger.trigger =
7835            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7836        aecTrigger.trigger_id =
7837            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7838        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7839                aecTrigger)) {
7840            rc = BAD_VALUE;
7841        }
7842        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7843                aecTrigger.trigger, aecTrigger.trigger_id);
7844    }
7845
7846    /*af_trigger must come with a trigger id*/
7847    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7848        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7849        cam_trigger_t af_trigger;
7850        af_trigger.trigger =
7851            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7852        af_trigger.trigger_id =
7853            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7854        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7855            rc = BAD_VALUE;
7856        }
7857        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7858                af_trigger.trigger, af_trigger.trigger_id);
7859    }
7860
7861    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7862        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7863        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
7864            rc = BAD_VALUE;
7865        }
7866    }
7867    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7868        cam_edge_application_t edge_application;
7869        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7870        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7871            edge_application.sharpness = 0;
7872        } else {
7873            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7874        }
7875        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7876            rc = BAD_VALUE;
7877        }
7878    }
7879
7880    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7881        int32_t respectFlashMode = 1;
7882        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7883            uint8_t fwk_aeMode =
7884                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7885            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7886                respectFlashMode = 0;
7887                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7888                    __func__);
7889            }
7890        }
7891        if (respectFlashMode) {
7892            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7893                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7894            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7895            // To check: CAM_INTF_META_FLASH_MODE usage
7896            if (NAME_NOT_FOUND != val) {
7897                uint8_t flashMode = (uint8_t)val;
7898                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
7899                    rc = BAD_VALUE;
7900                }
7901            }
7902        }
7903    }
7904
7905    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7906        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7907        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
7908            rc = BAD_VALUE;
7909        }
7910    }
7911
7912    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7913        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
7914        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
7915                flashFiringTime)) {
7916            rc = BAD_VALUE;
7917        }
7918    }
7919
7920    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
7921        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
7922        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
7923                hotPixelMode)) {
7924            rc = BAD_VALUE;
7925        }
7926    }
7927
7928    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
7929        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
7930        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
7931                lensAperture)) {
7932            rc = BAD_VALUE;
7933        }
7934    }
7935
7936    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
7937        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
7938        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
7939                filterDensity)) {
7940            rc = BAD_VALUE;
7941        }
7942    }
7943
7944    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
7945        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
7946        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
7947                focalLength)) {
7948            rc = BAD_VALUE;
7949        }
7950    }
7951
7952    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
7953        uint8_t optStabMode =
7954                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
7955        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
7956                optStabMode)) {
7957            rc = BAD_VALUE;
7958        }
7959    }
7960
7961    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
7962        uint8_t videoStabMode =
7963                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
7964        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
7965                videoStabMode)) {
7966            rc = BAD_VALUE;
7967        }
7968    }
7969
7970
7971    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
7972        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
7973        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
7974                noiseRedMode)) {
7975            rc = BAD_VALUE;
7976        }
7977    }
7978
7979    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
7980        float reprocessEffectiveExposureFactor =
7981            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
7982        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
7983                reprocessEffectiveExposureFactor)) {
7984            rc = BAD_VALUE;
7985        }
7986    }
7987
7988    cam_crop_region_t scalerCropRegion;
7989    bool scalerCropSet = false;
7990    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
7991        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
7992        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
7993        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
7994        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
7995
7996        // Map coordinate system from active array to sensor output.
7997        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
7998                scalerCropRegion.width, scalerCropRegion.height);
7999
8000        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8001                scalerCropRegion)) {
8002            rc = BAD_VALUE;
8003        }
8004        scalerCropSet = true;
8005    }
8006
8007    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8008        int64_t sensorExpTime =
8009                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8010        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
8011        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8012                sensorExpTime)) {
8013            rc = BAD_VALUE;
8014        }
8015    }
8016
8017    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8018        int64_t sensorFrameDuration =
8019                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8020        int64_t minFrameDuration = getMinFrameDuration(request);
8021        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8022        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8023            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8024        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
8025        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8026                sensorFrameDuration)) {
8027            rc = BAD_VALUE;
8028        }
8029    }
8030
8031    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8032        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8033        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8034                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8035        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8036                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8037        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
8038        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8039                sensorSensitivity)) {
8040            rc = BAD_VALUE;
8041        }
8042    }
8043
8044    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8045        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8046        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8047            rc = BAD_VALUE;
8048        }
8049    }
8050
8051    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8052        uint8_t fwk_facedetectMode =
8053                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8054
8055        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8056                fwk_facedetectMode);
8057
8058        if (NAME_NOT_FOUND != val) {
8059            uint8_t facedetectMode = (uint8_t)val;
8060            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8061                    facedetectMode)) {
8062                rc = BAD_VALUE;
8063            }
8064        }
8065    }
8066
8067    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8068        uint8_t histogramMode =
8069                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8070        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8071                histogramMode)) {
8072            rc = BAD_VALUE;
8073        }
8074    }
8075
8076    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8077        uint8_t sharpnessMapMode =
8078                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8079        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8080                sharpnessMapMode)) {
8081            rc = BAD_VALUE;
8082        }
8083    }
8084
8085    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8086        uint8_t tonemapMode =
8087                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8088        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8089            rc = BAD_VALUE;
8090        }
8091    }
8092    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8093    /*All tonemap channels will have the same number of points*/
8094    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8095        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8096        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8097        cam_rgb_tonemap_curves tonemapCurves;
8098        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8099        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8100            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8101                    __func__, tonemapCurves.tonemap_points_cnt,
8102                    CAM_MAX_TONEMAP_CURVE_SIZE);
8103            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8104        }
8105
8106        /* ch0 = G*/
8107        size_t point = 0;
8108        cam_tonemap_curve_t tonemapCurveGreen;
8109        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8110            for (size_t j = 0; j < 2; j++) {
8111               tonemapCurveGreen.tonemap_points[i][j] =
8112                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8113               point++;
8114            }
8115        }
8116        tonemapCurves.curves[0] = tonemapCurveGreen;
8117
8118        /* ch 1 = B */
8119        point = 0;
8120        cam_tonemap_curve_t tonemapCurveBlue;
8121        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8122            for (size_t j = 0; j < 2; j++) {
8123               tonemapCurveBlue.tonemap_points[i][j] =
8124                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8125               point++;
8126            }
8127        }
8128        tonemapCurves.curves[1] = tonemapCurveBlue;
8129
8130        /* ch 2 = R */
8131        point = 0;
8132        cam_tonemap_curve_t tonemapCurveRed;
8133        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8134            for (size_t j = 0; j < 2; j++) {
8135               tonemapCurveRed.tonemap_points[i][j] =
8136                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8137               point++;
8138            }
8139        }
8140        tonemapCurves.curves[2] = tonemapCurveRed;
8141
8142        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8143                tonemapCurves)) {
8144            rc = BAD_VALUE;
8145        }
8146    }
8147
8148    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8149        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8150        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8151                captureIntent)) {
8152            rc = BAD_VALUE;
8153        }
8154    }
8155
8156    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8157        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8158        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8159                blackLevelLock)) {
8160            rc = BAD_VALUE;
8161        }
8162    }
8163
8164    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8165        uint8_t lensShadingMapMode =
8166                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8167        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8168                lensShadingMapMode)) {
8169            rc = BAD_VALUE;
8170        }
8171    }
8172
8173    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8174        cam_area_t roi;
8175        bool reset = true;
8176        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8177
8178        // Map coordinate system from active array to sensor output.
8179        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8180                roi.rect.height);
8181
8182        if (scalerCropSet) {
8183            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8184        }
8185        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8186            rc = BAD_VALUE;
8187        }
8188    }
8189
8190    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8191        cam_area_t roi;
8192        bool reset = true;
8193        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8194
8195        // Map coordinate system from active array to sensor output.
8196        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8197                roi.rect.height);
8198
8199        if (scalerCropSet) {
8200            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8201        }
8202        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8203            rc = BAD_VALUE;
8204        }
8205    }
8206
8207    if (m_bIs4KVideo) {
8208        /* Override needed for Video template in case of 4K video */
8209        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8210                CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8211            rc = BAD_VALUE;
8212        }
8213    } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8214            frame_settings.exists(QCAMERA3_CDS_MODE)) {
8215        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8216        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8217            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8218        } else {
8219            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8220                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8221                rc = BAD_VALUE;
8222            }
8223        }
8224    }
8225
8226    // TNR
8227    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8228        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8229        uint8_t b_TnrRequested = 0;
8230        cam_denoise_param_t tnr;
8231        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8232        tnr.process_plates =
8233            (cam_denoise_process_type_t)frame_settings.find(
8234            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8235        b_TnrRequested = tnr.denoise_enable;
8236        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8237            rc = BAD_VALUE;
8238        }
8239    }
8240
8241    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8242        int32_t fwk_testPatternMode =
8243                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8244        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8245                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8246
8247        if (NAME_NOT_FOUND != testPatternMode) {
8248            cam_test_pattern_data_t testPatternData;
8249            memset(&testPatternData, 0, sizeof(testPatternData));
8250            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8251            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8252                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8253                int32_t *fwk_testPatternData =
8254                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8255                testPatternData.r = fwk_testPatternData[0];
8256                testPatternData.b = fwk_testPatternData[3];
8257                switch (gCamCapability[mCameraId]->color_arrangement) {
8258                    case CAM_FILTER_ARRANGEMENT_RGGB:
8259                    case CAM_FILTER_ARRANGEMENT_GRBG:
8260                        testPatternData.gr = fwk_testPatternData[1];
8261                        testPatternData.gb = fwk_testPatternData[2];
8262                        break;
8263                    case CAM_FILTER_ARRANGEMENT_GBRG:
8264                    case CAM_FILTER_ARRANGEMENT_BGGR:
8265                        testPatternData.gr = fwk_testPatternData[2];
8266                        testPatternData.gb = fwk_testPatternData[1];
8267                        break;
8268                    default:
8269                        ALOGE("%s: color arrangement %d is not supported", __func__,
8270                                gCamCapability[mCameraId]->color_arrangement);
8271                        break;
8272                }
8273            }
8274            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8275                    testPatternData)) {
8276                rc = BAD_VALUE;
8277            }
8278        } else {
8279            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8280                    fwk_testPatternMode);
8281        }
8282    }
8283
8284    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8285        size_t count = 0;
8286        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8287        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8288                gps_coords.data.d, gps_coords.count, count);
8289        if (gps_coords.count != count) {
8290            rc = BAD_VALUE;
8291        }
8292    }
8293
8294    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8295        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8296        size_t count = 0;
8297        const char *gps_methods_src = (const char *)
8298                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8299        memset(gps_methods, '\0', sizeof(gps_methods));
8300        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8301        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8302                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8303        if (GPS_PROCESSING_METHOD_SIZE != count) {
8304            rc = BAD_VALUE;
8305        }
8306    }
8307
8308    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8309        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8310        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8311                gps_timestamp)) {
8312            rc = BAD_VALUE;
8313        }
8314    }
8315
8316    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8317        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8318        cam_rotation_info_t rotation_info;
8319        if (orientation == 0) {
8320           rotation_info.rotation = ROTATE_0;
8321        } else if (orientation == 90) {
8322           rotation_info.rotation = ROTATE_90;
8323        } else if (orientation == 180) {
8324           rotation_info.rotation = ROTATE_180;
8325        } else if (orientation == 270) {
8326           rotation_info.rotation = ROTATE_270;
8327        }
8328        rotation_info.streamId = snapshotStreamId;
8329        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8330        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8331            rc = BAD_VALUE;
8332        }
8333    }
8334
8335    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8336        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8337        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8338            rc = BAD_VALUE;
8339        }
8340    }
8341
8342    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8343        uint32_t thumb_quality = (uint32_t)
8344                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8345        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8346                thumb_quality)) {
8347            rc = BAD_VALUE;
8348        }
8349    }
8350
8351    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8352        cam_dimension_t dim;
8353        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8354        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8355        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8356            rc = BAD_VALUE;
8357        }
8358    }
8359
8360    // Internal metadata
8361    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8362        size_t count = 0;
8363        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8364        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8365                privatedata.data.i32, privatedata.count, count);
8366        if (privatedata.count != count) {
8367            rc = BAD_VALUE;
8368        }
8369    }
8370
8371    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8372        uint8_t* use_av_timer =
8373                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8374        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8375            rc = BAD_VALUE;
8376        }
8377    }
8378
8379    // EV step
8380    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8381            gCamCapability[mCameraId]->exp_compensation_step)) {
8382        rc = BAD_VALUE;
8383    }
8384
8385    // CDS info
8386    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8387        cam_cds_data_t *cdsData = (cam_cds_data_t *)
8388                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8389
8390        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8391                CAM_INTF_META_CDS_DATA, *cdsData)) {
8392            rc = BAD_VALUE;
8393        }
8394    }
8395
8396    return rc;
8397}
8398
8399/*===========================================================================
8400 * FUNCTION   : captureResultCb
8401 *
8402 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8403 *
8404 * PARAMETERS :
8405 *   @frame  : frame information from mm-camera-interface
8406 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8407 *   @userdata: userdata
8408 *
8409 * RETURN     : NONE
8410 *==========================================================================*/
8411void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8412                camera3_stream_buffer_t *buffer,
8413                uint32_t frame_number, void *userdata)
8414{
8415    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8416    if (hw == NULL) {
8417        ALOGE("%s: Invalid hw %p", __func__, hw);
8418        return;
8419    }
8420
8421    hw->captureResultCb(metadata, buffer, frame_number);
8422    return;
8423}
8424
8425
8426/*===========================================================================
8427 * FUNCTION   : initialize
8428 *
8429 * DESCRIPTION: Pass framework callback pointers to HAL
8430 *
8431 * PARAMETERS :
8432 *
8433 *
8434 * RETURN     : Success : 0
8435 *              Failure: -ENODEV
8436 *==========================================================================*/
8437
8438int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8439                                  const camera3_callback_ops_t *callback_ops)
8440{
8441    CDBG("%s: E", __func__);
8442    QCamera3HardwareInterface *hw =
8443        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8444    if (!hw) {
8445        ALOGE("%s: NULL camera device", __func__);
8446        return -ENODEV;
8447    }
8448
8449    int rc = hw->initialize(callback_ops);
8450    CDBG("%s: X", __func__);
8451    return rc;
8452}
8453
8454/*===========================================================================
8455 * FUNCTION   : configure_streams
8456 *
8457 * DESCRIPTION:
8458 *
8459 * PARAMETERS :
8460 *
8461 *
8462 * RETURN     : Success: 0
8463 *              Failure: -EINVAL (if stream configuration is invalid)
8464 *                       -ENODEV (fatal error)
8465 *==========================================================================*/
8466
8467int QCamera3HardwareInterface::configure_streams(
8468        const struct camera3_device *device,
8469        camera3_stream_configuration_t *stream_list)
8470{
8471    CDBG("%s: E", __func__);
8472    QCamera3HardwareInterface *hw =
8473        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8474    if (!hw) {
8475        ALOGE("%s: NULL camera device", __func__);
8476        return -ENODEV;
8477    }
8478    int rc = hw->configureStreams(stream_list);
8479    CDBG("%s: X", __func__);
8480    return rc;
8481}
8482
8483/*===========================================================================
8484 * FUNCTION   : construct_default_request_settings
8485 *
8486 * DESCRIPTION: Configure a settings buffer to meet the required use case
8487 *
8488 * PARAMETERS :
8489 *
8490 *
8491 * RETURN     : Success: Return valid metadata
8492 *              Failure: Return NULL
8493 *==========================================================================*/
8494const camera_metadata_t* QCamera3HardwareInterface::
8495    construct_default_request_settings(const struct camera3_device *device,
8496                                        int type)
8497{
8498
8499    CDBG("%s: E", __func__);
8500    camera_metadata_t* fwk_metadata = NULL;
8501    QCamera3HardwareInterface *hw =
8502        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8503    if (!hw) {
8504        ALOGE("%s: NULL camera device", __func__);
8505        return NULL;
8506    }
8507
8508    fwk_metadata = hw->translateCapabilityToMetadata(type);
8509
8510    CDBG("%s: X", __func__);
8511    return fwk_metadata;
8512}
8513
8514/*===========================================================================
8515 * FUNCTION   : process_capture_request
8516 *
8517 * DESCRIPTION:
8518 *
8519 * PARAMETERS :
8520 *
8521 *
8522 * RETURN     :
8523 *==========================================================================*/
8524int QCamera3HardwareInterface::process_capture_request(
8525                    const struct camera3_device *device,
8526                    camera3_capture_request_t *request)
8527{
8528    CDBG("%s: E", __func__);
8529    QCamera3HardwareInterface *hw =
8530        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8531    if (!hw) {
8532        ALOGE("%s: NULL camera device", __func__);
8533        return -EINVAL;
8534    }
8535
8536    int rc = hw->processCaptureRequest(request);
8537    CDBG("%s: X", __func__);
8538    return rc;
8539}
8540
8541/*===========================================================================
8542 * FUNCTION   : dump
8543 *
8544 * DESCRIPTION:
8545 *
8546 * PARAMETERS :
8547 *
8548 *
8549 * RETURN     :
8550 *==========================================================================*/
8551
8552void QCamera3HardwareInterface::dump(
8553                const struct camera3_device *device, int fd)
8554{
8555    /* Log level property is read when "adb shell dumpsys media.camera" is
8556       called so that the log level can be controlled without restarting
8557       the media server */
8558    getLogLevel();
8559
8560    CDBG("%s: E", __func__);
8561    QCamera3HardwareInterface *hw =
8562        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8563    if (!hw) {
8564        ALOGE("%s: NULL camera device", __func__);
8565        return;
8566    }
8567
8568    hw->dump(fd);
8569    CDBG("%s: X", __func__);
8570    return;
8571}
8572
8573/*===========================================================================
8574 * FUNCTION   : flush
8575 *
8576 * DESCRIPTION:
8577 *
8578 * PARAMETERS :
8579 *
8580 *
8581 * RETURN     :
8582 *==========================================================================*/
8583
8584int QCamera3HardwareInterface::flush(
8585                const struct camera3_device *device)
8586{
8587    int rc;
8588    CDBG("%s: E", __func__);
8589    QCamera3HardwareInterface *hw =
8590        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8591    if (!hw) {
8592        ALOGE("%s: NULL camera device", __func__);
8593        return -EINVAL;
8594    }
8595
8596    rc = hw->flush();
8597    CDBG("%s: X", __func__);
8598    return rc;
8599}
8600
8601/*===========================================================================
8602 * FUNCTION   : close_camera_device
8603 *
8604 * DESCRIPTION:
8605 *
8606 * PARAMETERS :
8607 *
8608 *
8609 * RETURN     :
8610 *==========================================================================*/
8611int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8612{
8613    CDBG("%s: E", __func__);
8614    int ret = NO_ERROR;
8615    QCamera3HardwareInterface *hw =
8616        reinterpret_cast<QCamera3HardwareInterface *>(
8617            reinterpret_cast<camera3_device_t *>(device)->priv);
8618    if (!hw) {
8619        ALOGE("NULL camera device");
8620        return BAD_VALUE;
8621    }
8622    delete hw;
8623
8624    CDBG("%s: X", __func__);
8625    return ret;
8626}
8627
8628/*===========================================================================
8629 * FUNCTION   : getWaveletDenoiseProcessPlate
8630 *
8631 * DESCRIPTION: query wavelet denoise process plate
8632 *
8633 * PARAMETERS : None
8634 *
8635 * RETURN     : WNR prcocess plate value
8636 *==========================================================================*/
8637cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8638{
8639    char prop[PROPERTY_VALUE_MAX];
8640    memset(prop, 0, sizeof(prop));
8641    property_get("persist.denoise.process.plates", prop, "0");
8642    int processPlate = atoi(prop);
8643    switch(processPlate) {
8644    case 0:
8645        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8646    case 1:
8647        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8648    case 2:
8649        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8650    case 3:
8651        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8652    default:
8653        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8654    }
8655}
8656
8657
8658/*===========================================================================
8659 * FUNCTION   : getTemporalDenoiseProcessPlate
8660 *
8661 * DESCRIPTION: query temporal denoise process plate
8662 *
8663 * PARAMETERS : None
8664 *
8665 * RETURN     : TNR prcocess plate value
8666 *==========================================================================*/
8667cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
8668{
8669    char prop[PROPERTY_VALUE_MAX];
8670    memset(prop, 0, sizeof(prop));
8671    property_get("persist.tnr.process.plates", prop, "0");
8672    int processPlate = atoi(prop);
8673    switch(processPlate) {
8674    case 0:
8675        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8676    case 1:
8677        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8678    case 2:
8679        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8680    case 3:
8681        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8682    default:
8683        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8684    }
8685}
8686
8687
8688/*===========================================================================
8689 * FUNCTION   : extractSceneMode
8690 *
8691 * DESCRIPTION: Extract scene mode from frameworks set metadata
8692 *
8693 * PARAMETERS :
8694 *      @frame_settings: CameraMetadata reference
8695 *      @metaMode: ANDROID_CONTORL_MODE
8696 *      @hal_metadata: hal metadata structure
8697 *
8698 * RETURN     : None
8699 *==========================================================================*/
8700int32_t QCamera3HardwareInterface::extractSceneMode(
8701        const CameraMetadata &frame_settings, uint8_t metaMode,
8702        metadata_buffer_t *hal_metadata)
8703{
8704    int32_t rc = NO_ERROR;
8705
8706    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
8707        camera_metadata_ro_entry entry =
8708                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
8709        if (0 == entry.count)
8710            return rc;
8711
8712        uint8_t fwk_sceneMode = entry.data.u8[0];
8713
8714        int val = lookupHalName(SCENE_MODES_MAP,
8715                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
8716                fwk_sceneMode);
8717        if (NAME_NOT_FOUND != val) {
8718            uint8_t sceneMode = (uint8_t)val;
8719            CDBG("%s: sceneMode: %d", __func__, sceneMode);
8720            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8721                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8722                rc = BAD_VALUE;
8723            }
8724        }
8725    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
8726            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
8727        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
8728        CDBG("%s: sceneMode: %d", __func__, sceneMode);
8729        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8730                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8731            rc = BAD_VALUE;
8732        }
8733    }
8734    return rc;
8735}
8736
8737/*===========================================================================
8738 * FUNCTION   : needRotationReprocess
8739 *
8740 * DESCRIPTION: if rotation needs to be done by reprocess in pp
8741 *
8742 * PARAMETERS : none
8743 *
8744 * RETURN     : true: needed
8745 *              false: no need
8746 *==========================================================================*/
8747bool QCamera3HardwareInterface::needRotationReprocess()
8748{
8749    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
8750        // current rotation is not zero, and pp has the capability to process rotation
8751        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
8752        return true;
8753    }
8754
8755    return false;
8756}
8757
8758/*===========================================================================
8759 * FUNCTION   : needReprocess
8760 *
8761 * DESCRIPTION: if reprocess in needed
8762 *
8763 * PARAMETERS : none
8764 *
8765 * RETURN     : true: needed
8766 *              false: no need
8767 *==========================================================================*/
8768bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
8769{
8770    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
8771        // TODO: add for ZSL HDR later
8772        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
8773        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
8774            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
8775            return true;
8776        } else {
8777            CDBG_HIGH("%s: already post processed frame", __func__);
8778            return false;
8779        }
8780    }
8781    return needRotationReprocess();
8782}
8783
8784/*===========================================================================
8785 * FUNCTION   : needJpegRotation
8786 *
8787 * DESCRIPTION: if rotation from jpeg is needed
8788 *
8789 * PARAMETERS : none
8790 *
8791 * RETURN     : true: needed
8792 *              false: no need
8793 *==========================================================================*/
8794bool QCamera3HardwareInterface::needJpegRotation()
8795{
8796   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
8797    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
8798       CDBG("%s: Need Jpeg to do the rotation", __func__);
8799       return true;
8800    }
8801    return false;
8802}
8803
8804/*===========================================================================
8805 * FUNCTION   : addOfflineReprocChannel
8806 *
8807 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
8808 *              coming from input channel
8809 *
8810 * PARAMETERS :
8811 *   @config  : reprocess configuration
8812 *   @inputChHandle : pointer to the input (source) channel
8813 *
8814 *
8815 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8816 *==========================================================================*/
8817QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
8818        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
8819{
8820    int32_t rc = NO_ERROR;
8821    QCamera3ReprocessChannel *pChannel = NULL;
8822
8823    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
8824            mChannelHandle, mCameraHandle->ops, NULL, config.padding,
8825            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
8826    if (NULL == pChannel) {
8827        ALOGE("%s: no mem for reprocess channel", __func__);
8828        return NULL;
8829    }
8830
8831    rc = pChannel->initialize(IS_TYPE_NONE);
8832    if (rc != NO_ERROR) {
8833        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
8834        delete pChannel;
8835        return NULL;
8836    }
8837
8838    // pp feature config
8839    cam_pp_feature_config_t pp_config;
8840    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
8841
8842    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
8843
8844    rc = pChannel->addReprocStreamsFromSource(pp_config,
8845            config,
8846            IS_TYPE_NONE,
8847            mMetadataChannel);
8848
8849    if (rc != NO_ERROR) {
8850        delete pChannel;
8851        return NULL;
8852    }
8853    return pChannel;
8854}
8855
8856/*===========================================================================
8857 * FUNCTION   : getMobicatMask
8858 *
8859 * DESCRIPTION: returns mobicat mask
8860 *
8861 * PARAMETERS : none
8862 *
8863 * RETURN     : mobicat mask
8864 *
8865 *==========================================================================*/
8866uint8_t QCamera3HardwareInterface::getMobicatMask()
8867{
8868    return m_MobicatMask;
8869}
8870
8871/*===========================================================================
8872 * FUNCTION   : setMobicat
8873 *
8874 * DESCRIPTION: set Mobicat on/off.
8875 *
8876 * PARAMETERS :
8877 *   @params  : none
8878 *
8879 * RETURN     : int32_t type of status
8880 *              NO_ERROR  -- success
8881 *              none-zero failure code
8882 *==========================================================================*/
8883int32_t QCamera3HardwareInterface::setMobicat()
8884{
8885    char value [PROPERTY_VALUE_MAX];
8886    property_get("persist.camera.mobicat", value, "0");
8887    int32_t ret = NO_ERROR;
8888    uint8_t enableMobi = (uint8_t)atoi(value);
8889
8890    if (enableMobi) {
8891        tune_cmd_t tune_cmd;
8892        tune_cmd.type = SET_RELOAD_CHROMATIX;
8893        tune_cmd.module = MODULE_ALL;
8894        tune_cmd.value = TRUE;
8895        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8896                CAM_INTF_PARM_SET_VFE_COMMAND,
8897                tune_cmd);
8898
8899        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8900                CAM_INTF_PARM_SET_PP_COMMAND,
8901                tune_cmd);
8902    }
8903    m_MobicatMask = enableMobi;
8904
8905    return ret;
8906}
8907
8908/*===========================================================================
8909* FUNCTION   : getLogLevel
8910*
8911* DESCRIPTION: Reads the log level property into a variable
8912*
8913* PARAMETERS :
8914*   None
8915*
8916* RETURN     :
8917*   None
8918*==========================================================================*/
8919void QCamera3HardwareInterface::getLogLevel()
8920{
8921    char prop[PROPERTY_VALUE_MAX];
8922    uint32_t globalLogLevel = 0;
8923
8924    property_get("persist.camera.hal.debug", prop, "0");
8925    int val = atoi(prop);
8926    if (0 <= val) {
8927        gCamHal3LogLevel = (uint32_t)val;
8928    }
8929    property_get("persist.camera.global.debug", prop, "0");
8930    val = atoi(prop);
8931    if (0 <= val) {
8932        globalLogLevel = (uint32_t)val;
8933    }
8934
8935    /* Highest log level among hal.logs and global.logs is selected */
8936    if (gCamHal3LogLevel < globalLogLevel)
8937        gCamHal3LogLevel = globalLogLevel;
8938
8939    return;
8940}
8941
8942/*===========================================================================
8943 * FUNCTION   : validateStreamRotations
8944 *
8945 * DESCRIPTION: Check if the rotations requested are supported
8946 *
8947 * PARAMETERS :
8948 *   @stream_list : streams to be configured
8949 *
8950 * RETURN     : NO_ERROR on success
8951 *              -EINVAL on failure
8952 *
8953 *==========================================================================*/
8954int QCamera3HardwareInterface::validateStreamRotations(
8955        camera3_stream_configuration_t *streamList)
8956{
8957    int rc = NO_ERROR;
8958
8959    /*
8960    * Loop through all streams requested in configuration
8961    * Check if unsupported rotations have been requested on any of them
8962    */
8963    for (size_t j = 0; j < streamList->num_streams; j++){
8964        camera3_stream_t *newStream = streamList->streams[j];
8965
8966        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
8967        bool isImplDef = (newStream->format ==
8968                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
8969        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
8970                isImplDef);
8971
8972        if (isRotated && (!isImplDef || isZsl)) {
8973            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
8974                    "type:%d and stream format:%d", __func__,
8975                    newStream->rotation, newStream->stream_type,
8976                    newStream->format);
8977            rc = -EINVAL;
8978            break;
8979        }
8980    }
8981    return rc;
8982}
8983
8984/*===========================================================================
8985* FUNCTION   : getFlashInfo
8986*
8987* DESCRIPTION: Retrieve information about whether the device has a flash.
8988*
8989* PARAMETERS :
8990*   @cameraId  : Camera id to query
8991*   @hasFlash  : Boolean indicating whether there is a flash device
8992*                associated with given camera
8993*   @flashNode : If a flash device exists, this will be its device node.
8994*
8995* RETURN     :
8996*   None
8997*==========================================================================*/
8998void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
8999        bool& hasFlash,
9000        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9001{
9002    cam_capability_t* camCapability = gCamCapability[cameraId];
9003    if (NULL == camCapability) {
9004        hasFlash = false;
9005        flashNode[0] = '\0';
9006    } else {
9007        hasFlash = camCapability->flash_available;
9008        strlcpy(flashNode,
9009                (char*)camCapability->flash_dev_name,
9010                QCAMERA_MAX_FILEPATH_LENGTH);
9011    }
9012}
9013
9014/*===========================================================================
9015* FUNCTION   : getEepromVersionInfo
9016*
9017* DESCRIPTION: Retrieve version info of the sensor EEPROM data
9018*
9019* PARAMETERS : None
9020*
9021* RETURN     : string describing EEPROM version
9022*              "\0" if no such info available
9023*==========================================================================*/
9024const char *QCamera3HardwareInterface::getEepromVersionInfo()
9025{
9026    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
9027}
9028
9029/*===========================================================================
9030* FUNCTION   : getLdafCalib
9031*
9032* DESCRIPTION: Retrieve Laser AF calibration data
9033*
9034* PARAMETERS : None
9035*
9036* RETURN     : Two uint32_t describing laser AF calibration data
9037*              NULL if none is available.
9038*==========================================================================*/
9039const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9040{
9041    if (mLdafCalibExist) {
9042        return &mLdafCalib[0];
9043    } else {
9044        return NULL;
9045    }
9046}
9047
9048/*===========================================================================
9049 * FUNCTION   : dynamicUpdateMetaStreamInfo
9050 *
9051 * DESCRIPTION: This function:
9052 *             (1) stops all the channels
9053 *             (2) returns error on pending requests and buffers
9054 *             (3) sends metastream_info in setparams
9055 *             (4) starts all channels
9056 *             This is useful when sensor has to be restarted to apply any
9057 *             settings such as frame rate from a different sensor mode
9058 *
9059 * PARAMETERS : None
9060 *
9061 * RETURN     : NO_ERROR on success
9062 *              Error codes on failure
9063 *
9064 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Step 1: stream-off every channel so the sensor can be reconfigured.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Step 2: fail out all in-flight requests/buffers — they cannot be
    // completed once the channels are stopped.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    // NOTE: a set_parms failure is only logged — streaming is still
    // restarted below with the old sensor mode.
    if (rc < 0) {
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // Step 3: stream-on everything again with the (possibly new) settings.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
9104
9105/*===========================================================================
9106 * FUNCTION   : stopAllChannels
9107 *
9108 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9109 *
9110 * PARAMETERS : None
9111 *
9112 * RETURN     : NO_ERROR on success
9113 *              Error codes on failure
9114 *
9115 *==========================================================================*/
9116int32_t QCamera3HardwareInterface::stopAllChannels()
9117{
9118    int32_t rc = NO_ERROR;
9119
9120    // Stop the Streams/Channels
9121    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9122        it != mStreamInfo.end(); it++) {
9123        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9124        channel->stop();
9125        (*it)->status = INVALID;
9126    }
9127
9128    if (mSupportChannel) {
9129        mSupportChannel->stop();
9130    }
9131    if (mAnalysisChannel) {
9132        mAnalysisChannel->stop();
9133    }
9134    if (mRawDumpChannel) {
9135        mRawDumpChannel->stop();
9136    }
9137    if (mMetadataChannel) {
9138        /* If content of mStreamInfo is not 0, there is metadata stream */
9139        mMetadataChannel->stop();
9140    }
9141
9142    CDBG("%s:%d All channels stopped", __func__, __LINE__);
9143    return rc;
9144}
9145
9146/*===========================================================================
9147 * FUNCTION   : startAllChannels
9148 *
9149 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9150 *
9151 * PARAMETERS : None
9152 *
9153 * RETURN     : NO_ERROR on success
9154 *              Error codes on failure
9155 *
9156 *==========================================================================*/
9157int32_t QCamera3HardwareInterface::startAllChannels()
9158{
9159    int32_t rc = NO_ERROR;
9160
9161    CDBG("%s: Start all channels ", __func__);
9162    // Start the Streams/Channels
9163    if (mMetadataChannel) {
9164        /* If content of mStreamInfo is not 0, there is metadata stream */
9165        rc = mMetadataChannel->start();
9166        if (rc < 0) {
9167            ALOGE("%s: META channel start failed", __func__);
9168            return rc;
9169        }
9170    }
9171    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9172        it != mStreamInfo.end(); it++) {
9173        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9174        rc = channel->start();
9175        if (rc < 0) {
9176            ALOGE("%s: channel start failed", __func__);
9177            return rc;
9178        }
9179    }
9180    if (mAnalysisChannel) {
9181        mAnalysisChannel->start();
9182    }
9183    if (mSupportChannel) {
9184        rc = mSupportChannel->start();
9185        if (rc < 0) {
9186            ALOGE("%s: Support channel start failed", __func__);
9187            return rc;
9188        }
9189    }
9190    if (mRawDumpChannel) {
9191        rc = mRawDumpChannel->start();
9192        if (rc < 0) {
9193            ALOGE("%s: RAW dump channel start failed", __func__);
9194            return rc;
9195        }
9196    }
9197
9198    CDBG("%s:%d All channels started", __func__, __LINE__);
9199    return rc;
9200}
9201
9202/*===========================================================================
9203 * FUNCTION   : notifyErrorForPendingRequests
9204 *
9205 * DESCRIPTION: This function sends error for all the pending requests/buffers
9206 *
9207 * PARAMETERS : None
9208 *
9209 * RETURN     : Error codes
9210 *              NO_ERROR on success
9211 *
9212 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    // flushMap groups pending buffers by frame number so each frame can be
    // reported to the framework with a single capture_result callback.
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Determine the oldest frame that still has a pending request entry.
    // Buffers older than this frame have already had their metadata sent,
    // so they only need an ERROR_BUFFER; newer ones get ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
      __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        // Phase 1: collect only buffers OLDER than the oldest pending
        // request; they are removed from the pending list as they go.
        if (k->frame_number < frameNum) {
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        // NOTE(review): "%d" is used for pending.size() which is size_t —
        // should be "%zu"; harmless on 32-bit but worth confirming.
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
          __func__, frame_number, pending.size());

        // NOTE(review): operator new[] throws std::bad_alloc by default, so
        // this NULL check is likely dead code unless the build disables
        // exceptions / uses nothrow new — confirm build flags.
        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        // One ERROR_BUFFER notify per buffer, then a single capture result
        // carrying all of this frame's buffers in error state.
        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: everything still on the pending-buffer list belongs to frames
    // that also have a pending request entry. Regroup them by frame number.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
              __func__, frame_number);

        // Send shutter notify to frameworks
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        // NOTE(review): this assumes flushMap entries pair 1:1, and in the
        // same order, with mPendingRequestsList entries. If a pending request
        // had no pending buffers (or vice versa), `i` could reach end() and
        // the dereference below would be undefined — verify the invariant.
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
9380
9381bool QCamera3HardwareInterface::isOnEncoder(
9382        const cam_dimension_t max_viewfinder_size,
9383        uint32_t width, uint32_t height)
9384{
9385    return (width > (uint32_t)max_viewfinder_size.width ||
9386            height > (uint32_t)max_viewfinder_size.height);
9387}
9388
9389/*===========================================================================
9390 * FUNCTION   : setBundleInfo
9391 *
 * DESCRIPTION: Set bundle info for all streams that are bundled.
9393 *
9394 * PARAMETERS : None
9395 *
9396 * RETURN     : NO_ERROR on success
9397 *              Error codes on failure
9398 *==========================================================================*/
9399int32_t QCamera3HardwareInterface::setBundleInfo()
9400{
9401    int32_t rc = NO_ERROR;
9402
9403    if (mChannelHandle) {
9404        cam_bundle_config_t bundleInfo;
9405        memset(&bundleInfo, 0, sizeof(bundleInfo));
9406        rc = mCameraHandle->ops->get_bundle_info(
9407                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9408        if (rc != NO_ERROR) {
9409            ALOGE("%s: get_bundle_info failed", __func__);
9410            return rc;
9411        }
9412        if (mAnalysisChannel) {
9413            mAnalysisChannel->setBundleInfo(bundleInfo);
9414        }
9415        if (mSupportChannel) {
9416            mSupportChannel->setBundleInfo(bundleInfo);
9417        }
9418        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9419                it != mStreamInfo.end(); it++) {
9420            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9421            channel->setBundleInfo(bundleInfo);
9422        }
9423        if (mRawDumpChannel) {
9424            mRawDumpChannel->setBundleInfo(bundleInfo);
9425        }
9426    }
9427
9428    return rc;
9429}
9430
9431}; //end namespace qcamera
9432