QCamera3HWI.cpp revision 080f30216a4336ff35fdddea84eb39f92368e0d2
/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <sync/sync.h>
46#include <gralloc_priv.h>
47#include "util/QCameraFlash.h"
48#include "QCamera3HWI.h"
49#include "QCamera3Mem.h"
50#include "QCamera3Channel.h"
51#include "QCamera3PostProc.h"
52#include "QCamera3VendorTags.h"
53
54using namespace android;
55
56namespace qcamera {
57
58#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
59
60#define EMPTY_PIPELINE_DELAY 2
61#define PARTIAL_RESULT_COUNT 2
62#define FRAME_SKIP_DELAY     0
63#define CAM_MAX_SYNC_LATENCY 4
64
65#define MAX_VALUE_8BIT ((1<<8)-1)
66#define MAX_VALUE_10BIT ((1<<10)-1)
67#define MAX_VALUE_12BIT ((1<<12)-1)
68
69#define VIDEO_4K_WIDTH  3840
70#define VIDEO_4K_HEIGHT 2160
71
72#define MAX_EIS_WIDTH 1920
73#define MAX_EIS_HEIGHT 1080
74
75#define MAX_RAW_STREAMS        1
76#define MAX_STALLING_STREAMS   1
77#define MAX_PROCESSED_STREAMS  3
78/* Batch mode is enabled only if FPS set is equal to or greater than this */
79#define MIN_FPS_FOR_BATCH_MODE (120)
80#define PREVIEW_FPS_FOR_HFR    (30)
81#define DEFAULT_VIDEO_FPS      (30.0)
82#define MAX_HFR_BATCH_SIZE     (4)
83#define REGIONS_TUPLE_COUNT    5
84#define MAX_INFLIGHT_HFR_REQUESTS (48)
85#define MIN_INFLIGHT_HFR_REQUESTS (40)
86
87#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
88
89#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
90                                              CAM_QCOM_FEATURE_CROP |\
91                                              CAM_QCOM_FEATURE_ROTATION |\
92                                              CAM_QCOM_FEATURE_SHARPNESS |\
93                                              CAM_QCOM_FEATURE_SCALE |\
94                                              CAM_QCOM_FEATURE_CAC |\
95                                              CAM_QCOM_FEATURE_CDS )
96
97#define TIMEOUT_NEVER -1
98
99cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
100const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
101static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
102volatile uint32_t gCamHal3LogLevel = 1;
103
104const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
105    {"On",  CAM_CDS_MODE_ON},
106    {"Off", CAM_CDS_MODE_OFF},
107    {"Auto",CAM_CDS_MODE_AUTO}
108};
109
110const QCamera3HardwareInterface::QCameraMap<
111        camera_metadata_enum_android_control_effect_mode_t,
112        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
113    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
114    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
115    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
116    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
117    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
118    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
119    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
120    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
121    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
122};
123
124const QCamera3HardwareInterface::QCameraMap<
125        camera_metadata_enum_android_control_awb_mode_t,
126        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
127    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
128    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
129    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
130    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
131    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
132    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
133    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
134    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
135    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
136};
137
138const QCamera3HardwareInterface::QCameraMap<
139        camera_metadata_enum_android_control_scene_mode_t,
140        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
141    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
142    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
143    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
144    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
145    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
146    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
147    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
148    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
149    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
150    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
151    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
152    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
153    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
154    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
155    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
156    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
157};
158
159const QCamera3HardwareInterface::QCameraMap<
160        camera_metadata_enum_android_control_af_mode_t,
161        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
162    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
163    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
164    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
165    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
166    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
167    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
168    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
169};
170
171const QCamera3HardwareInterface::QCameraMap<
172        camera_metadata_enum_android_color_correction_aberration_mode_t,
173        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
174    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
175            CAM_COLOR_CORRECTION_ABERRATION_OFF },
176    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
177            CAM_COLOR_CORRECTION_ABERRATION_FAST },
178    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
179            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
180};
181
182const QCamera3HardwareInterface::QCameraMap<
183        camera_metadata_enum_android_control_ae_antibanding_mode_t,
184        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
185    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
186    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
187    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
188    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
189};
190
191const QCamera3HardwareInterface::QCameraMap<
192        camera_metadata_enum_android_control_ae_mode_t,
193        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
194    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
195    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
196    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
197    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
198    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
199};
200
201const QCamera3HardwareInterface::QCameraMap<
202        camera_metadata_enum_android_flash_mode_t,
203        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
204    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
205    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
206    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
207};
208
209const QCamera3HardwareInterface::QCameraMap<
210        camera_metadata_enum_android_statistics_face_detect_mode_t,
211        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
212    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
213    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
214};
215
216const QCamera3HardwareInterface::QCameraMap<
217        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
218        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
219    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
220      CAM_FOCUS_UNCALIBRATED },
221    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
222      CAM_FOCUS_APPROXIMATE },
223    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
224      CAM_FOCUS_CALIBRATED }
225};
226
227const QCamera3HardwareInterface::QCameraMap<
228        camera_metadata_enum_android_lens_state_t,
229        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
230    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
231    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
232};
233
234const int32_t available_thumbnail_sizes[] = {0, 0,
235                                             176, 144,
236                                             320, 240,
237                                             432, 288,
238                                             480, 288,
239                                             512, 288,
240                                             512, 384};
241
242const QCamera3HardwareInterface::QCameraMap<
243        camera_metadata_enum_android_sensor_test_pattern_mode_t,
244        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
245    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
246    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
247    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
248    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
249    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
250};
251
252/* Since there is no mapping for all the options some Android enum are not listed.
253 * Also, the order in this list is important because while mapping from HAL to Android it will
254 * traverse from lower to higher index which means that for HAL values that are map to different
255 * Android values, the traverse logic will select the first one found.
256 */
257const QCamera3HardwareInterface::QCameraMap<
258        camera_metadata_enum_android_sensor_reference_illuminant1_t,
259        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
260    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
261    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
262    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
263    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
264    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
265    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
266    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
267    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
268    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
269    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
270    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
271    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
272    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
273    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
274    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
275    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
276};
277
278const QCamera3HardwareInterface::QCameraMap<
279        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
280    { 60, CAM_HFR_MODE_60FPS},
281    { 90, CAM_HFR_MODE_90FPS},
282    { 120, CAM_HFR_MODE_120FPS},
283    { 150, CAM_HFR_MODE_150FPS},
284    { 180, CAM_HFR_MODE_180FPS},
285    { 210, CAM_HFR_MODE_210FPS},
286    { 240, CAM_HFR_MODE_240FPS},
287    { 480, CAM_HFR_MODE_480FPS},
288};
289
// camera3_device_ops vtable handed to the framework (GNU designated
// initializers; field order must match camera3.h).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,   // deprecated in HAL3.2+, must be NULL
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,   // vendor tags exposed via module ops instead
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
301
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes the
 *              camera3_device struct handed to the framework, per-instance
 *              synchronization primitives, and reads debug/override system
 *              properties. Does NOT open the camera backend (see openCamera).
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module-level callbacks (stored, invoked by the framework glue)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mPrevUrgentFrameNumber(0),
      mPrevFrameNumber(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false)
{
    getLogLevel();
    // Fill in the camera3_device handed back to the framework via openCamera().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // Capability table is expected to be populated before construction
    // (static metadata query path) — presumably by the module layer; confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;   // built lazily per template, freed in dtor

#ifdef HAS_MULTIMEDIA_HINTS
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    // Debug override for face detection: -1 (default) = honor app setting,
    // otherwise force the given ANDROID_STATISTICS_FACE_DETECT_MODE_* value.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.facedetect", prop, "-1");
    m_overrideAppFaceDetection = (int8_t)atoi(prop);
    if (m_overrideAppFaceDetection >= 0)
    {
        CDBG_FATAL_IF(m_overrideAppFaceDetection > ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
        CDBG("%s: Override face detection: %d", __func__, m_overrideAppFaceDetection);
    }

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrEnabled = (uint8_t)atoi(prop);
}
403
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown order is
 *              significant: stop every channel first, then delete channels,
 *              then send the final "unconfigure" to the backend, deinit
 *              parameters, close the camera, and finally free pending
 *              request bookkeeping and cached default metadata.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */


    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }

    /* Turn off video hint */
    updatePowerHint(m_bIsVideo, false);

    // Second pass: everything is stopped, now the channel objects (and the
    // stream_info records that own them) can be deleted safely.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // Picture channel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    // erasePendingRequest frees per-request heap allocations before erasing.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    CDBG("%s: X", __func__);
}
509
510/*===========================================================================
511 * FUNCTION   : erasePendingRequest
512 *
513 * DESCRIPTION: function to erase a desired pending request after freeing any
514 *              allocated memory
515 *
516 * PARAMETERS :
517 *   @i       : iterator pointing to pending request to be erased
518 *
519 * RETURN     : iterator pointing to the next request
520 *==========================================================================*/
521QCamera3HardwareInterface::pendingRequestIterator
522        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
523{
524    if (i->input_buffer != NULL) {
525        free(i->input_buffer);
526        i->input_buffer = NULL;
527    }
528    if (i->settings != NULL)
529        free_camera_metadata((camera_metadata_t*)i->settings);
530    return mPendingRequestsList.erase(i);
531}
532
533/*===========================================================================
534 * FUNCTION   : camEvtHandle
535 *
536 * DESCRIPTION: Function registered to mm-camera-interface to handle events
537 *
538 * PARAMETERS :
539 *   @camera_handle : interface layer camera handle
540 *   @evt           : ptr to event
541 *   @user_data     : user data ptr
542 *
543 * RETURN     : none
544 *==========================================================================*/
545void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
546                                          mm_camera_event_t *evt,
547                                          void *user_data)
548{
549    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
550    if (obj && evt) {
551        switch(evt->server_event_type) {
552            case CAM_EVENT_TYPE_DAEMON_DIED:
553                ALOGE("%s: Fatal, camera daemon died", __func__);
554                //close the camera backend
555                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
556                        && obj->mCameraHandle->ops) {
557                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
558                } else {
559                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
560                            __func__);
561                }
562                camera3_notify_msg_t notify_msg;
563                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
564                notify_msg.type = CAMERA3_MSG_ERROR;
565                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
566                notify_msg.message.error.error_stream = NULL;
567                notify_msg.message.error.frame_number = 0;
568                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
569                break;
570
571            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
572                CDBG("%s: HAL got request pull from Daemon", __func__);
573                pthread_mutex_lock(&obj->mMutex);
574                obj->mWokenUpByDaemon = true;
575                obj->unblockRequestIfNecessary();
576                pthread_mutex_unlock(&obj->mMutex);
577                break;
578
579            default:
580                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
581                        evt->server_event_type);
582                break;
583        }
584    } else {
585        ALOGE("%s: NULL user_data/evt", __func__);
586    }
587}
588
589/*===========================================================================
590 * FUNCTION   : openCamera
591 *
592 * DESCRIPTION: open camera
593 *
594 * PARAMETERS :
595 *   @hw_device  : double ptr for camera device struct
596 *
597 * RETURN     : int32_t type of status
598 *              NO_ERROR  -- success
599 *              none-zero failure code
600 *==========================================================================*/
601int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
602{
603    int rc = 0;
604    if (mCameraOpened) {
605        *hw_device = NULL;
606        return PERMISSION_DENIED;
607    }
608
609    rc = openCamera();
610    if (rc == 0) {
611        *hw_device = &mCameraDevice.common;
612    } else
613        *hw_device = NULL;
614
615    return rc;
616}
617
618/*===========================================================================
619 * FUNCTION   : openCamera
620 *
621 * DESCRIPTION: open camera
622 *
623 * PARAMETERS : none
624 *
625 * RETURN     : int32_t type of status
626 *              NO_ERROR  -- success
627 *              none-zero failure code
628 *==========================================================================*/
629int QCamera3HardwareInterface::openCamera()
630{
631    int rc = 0;
632
633    ATRACE_CALL();
634    if (mCameraHandle) {
635        ALOGE("Failure: Camera already opened");
636        return ALREADY_EXISTS;
637    }
638
639    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
640    if (rc < 0) {
641        ALOGE("%s: Failed to reserve flash for camera id: %d",
642                __func__,
643                mCameraId);
644        return UNKNOWN_ERROR;
645    }
646
647    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
648    if (rc) {
649        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
650        return rc;
651    }
652
653    mCameraOpened = true;
654
655    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
656            camEvtHandle, (void *)this);
657
658    if (rc < 0) {
659        ALOGE("%s: Error, failed to register event callback", __func__);
660        /* Not closing camera here since it is already handled in destructor */
661        return FAILED_TRANSACTION;
662    }
663    mFirstConfiguration = true;
664    return NO_ERROR;
665}
666
667/*===========================================================================
668 * FUNCTION   : closeCamera
669 *
670 * DESCRIPTION: close camera
671 *
672 * PARAMETERS : none
673 *
674 * RETURN     : int32_t type of status
675 *              NO_ERROR  -- success
676 *              none-zero failure code
677 *==========================================================================*/
678int QCamera3HardwareInterface::closeCamera()
679{
680    ATRACE_CALL();
681    int rc = NO_ERROR;
682
683    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
684    mCameraHandle = NULL;
685    mCameraOpened = false;
686
687    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
688        CDBG("%s: Failed to release flash for camera id: %d",
689                __func__,
690                mCameraId);
691    }
692
693    return rc;
694}
695
696/*===========================================================================
697 * FUNCTION   : initialize
698 *
699 * DESCRIPTION: Initialize frameworks callback functions
700 *
701 * PARAMETERS :
702 *   @callback_ops : callback function to frameworks
703 *
704 * RETURN     :
705 *
706 *==========================================================================*/
707int QCamera3HardwareInterface::initialize(
708        const struct camera3_callback_ops *callback_ops)
709{
710    ATRACE_CALL();
711    int rc;
712
713    pthread_mutex_lock(&mMutex);
714
715    rc = initParameters();
716    if (rc < 0) {
717        ALOGE("%s: initParamters failed %d", __func__, rc);
718       goto err1;
719    }
720    mCallbackOps = callback_ops;
721
722    pthread_mutex_unlock(&mMutex);
723    mCameraInitialized = true;
724    return 0;
725
726err1:
727    pthread_mutex_unlock(&mMutex);
728    return rc;
729}
730
731/*===========================================================================
732 * FUNCTION   : validateStreamDimensions
733 *
734 * DESCRIPTION: Check if the configuration requested are those advertised
735 *
736 * PARAMETERS :
737 *   @stream_list : streams to be configured
738 *
739 * RETURN     :
740 *
741 *==========================================================================*/
742int QCamera3HardwareInterface::validateStreamDimensions(
743        camera3_stream_configuration_t *streamList)
744{
745    int rc = NO_ERROR;
746    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
747    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
748    size_t count = 0;
749
750    camera3_stream_t *inputStream = NULL;
751    /*
752    * Loop through all streams to find input stream if it exists*
753    */
754    for (size_t i = 0; i< streamList->num_streams; i++) {
755        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
756            if (inputStream != NULL) {
757                ALOGE("%s: Error, Multiple input streams requested");
758                return -EINVAL;
759            }
760            inputStream = streamList->streams[i];
761        }
762    }
763    /*
764    * Loop through all streams requested in configuration
765    * Check if unsupported sizes have been requested on any of them
766    */
767    for (size_t j = 0; j < streamList->num_streams; j++) {
768        bool sizeFound = false;
769        size_t jpeg_sizes_cnt = 0;
770        camera3_stream_t *newStream = streamList->streams[j];
771
772        uint32_t rotatedHeight = newStream->height;
773        uint32_t rotatedWidth = newStream->width;
774        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
775                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
776            rotatedHeight = newStream->width;
777            rotatedWidth = newStream->height;
778        }
779
780        /*
781        * Sizes are different for each type of stream format check against
782        * appropriate table.
783        */
784        switch (newStream->format) {
785        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
786        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
787        case HAL_PIXEL_FORMAT_RAW10:
788            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
789            for (size_t i = 0; i < count; i++) {
790                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
791                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
792                    sizeFound = true;
793                    break;
794                }
795            }
796            break;
797        case HAL_PIXEL_FORMAT_BLOB:
798            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
799            /* Generate JPEG sizes table */
800            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
801                    count,
802                    MAX_SIZES_CNT,
803                    available_processed_sizes);
804            jpeg_sizes_cnt = filterJpegSizes(
805                    available_jpeg_sizes,
806                    available_processed_sizes,
807                    count * 2,
808                    MAX_SIZES_CNT * 2,
809                    gCamCapability[mCameraId]->active_array_size,
810                    gCamCapability[mCameraId]->max_downscale_factor);
811
812            /* Verify set size against generated sizes table */
813            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
814                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
815                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
816                    sizeFound = true;
817                    break;
818                }
819            }
820            break;
821        case HAL_PIXEL_FORMAT_YCbCr_420_888:
822        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
823        default:
824            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
825                    || newStream->stream_type == CAMERA3_STREAM_INPUT
826                    || IS_USAGE_ZSL(newStream->usage)) {
827                if (((int32_t)rotatedWidth ==
828                                gCamCapability[mCameraId]->active_array_size.width) &&
829                                ((int32_t)rotatedHeight ==
830                                gCamCapability[mCameraId]->active_array_size.height)) {
831                    sizeFound = true;
832                    break;
833                }
834                /* We could potentially break here to enforce ZSL stream
835                 * set from frameworks always is full active array size
836                 * but it is not clear from the spc if framework will always
837                 * follow that, also we have logic to override to full array
838                 * size, so keeping the logic lenient at the moment
839                 */
840            }
841            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
842                    MAX_SIZES_CNT);
843            for (size_t i = 0; i < count; i++) {
844                if (((int32_t)rotatedWidth ==
845                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
846                            ((int32_t)rotatedHeight ==
847                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
848                    sizeFound = true;
849                    break;
850                }
851            }
852            break;
853        } /* End of switch(newStream->format) */
854
855        /* We error out even if a single stream has unsupported size set */
856        if (!sizeFound) {
857            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
858                  "type:%d", __func__, rotatedWidth, rotatedHeight,
859                  newStream->format);
860            ALOGE("%s: Active array size is  %d x %d", __func__,
861                    gCamCapability[mCameraId]->active_array_size.width,
862                    gCamCapability[mCameraId]->active_array_size.height);
863            rc = -EINVAL;
864            break;
865        }
866    } /* End of for each stream */
867    return rc;
868}
869
870/*==============================================================================
871 * FUNCTION   : isSupportChannelNeeded
872 *
873 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
874 *
875 * PARAMETERS :
876 *   @stream_list : streams to be configured
877 *
 * RETURN     : Boolean true/false decision
879 *
880 *==========================================================================*/
881bool QCamera3HardwareInterface::isSupportChannelNeeded(
882        camera3_stream_configuration_t *streamList,
883        cam_stream_size_info_t stream_config_info,
884        uint32_t fullFeatureMask)
885{
886    uint32_t i;
887    bool bSuperSetPresent = false;
888    /* Check for conditions where PProc pipeline does not have any streams*/
889    for (i = 0; i < stream_config_info.num_streams; i++) {
890        if (stream_config_info.postprocess_mask[i] == fullFeatureMask) {
891            bSuperSetPresent = true;
892            break;
893        }
894    }
895
896    if (bSuperSetPresent == false )
897        return true;
898
899    /* Dummy stream needed if only raw or jpeg streams present */
900    for (i = 0;i < streamList->num_streams;i++) {
901        switch(streamList->streams[i]->format) {
902            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
903            case HAL_PIXEL_FORMAT_RAW10:
904            case HAL_PIXEL_FORMAT_RAW16:
905            case HAL_PIXEL_FORMAT_BLOB:
906                break;
907            default:
908                return false;
909        }
910    }
911    return true;
912}
913
914/*==============================================================================
915 * FUNCTION   : getSensorOutputSize
916 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
918 *
919 * PARAMETERS :
920 *   @sensor_dim : sensor output dimension (output)
921 *
922 * RETURN     : int32_t type of status
923 *              NO_ERROR  -- success
924 *              none-zero failure code
925 *
926 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // Compute max width and max height independently across all configured
    // streams; the resulting dimension may not match any single stream's
    // aspect ratio, but the sensor output must cover both extremes.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // The parameter buffer is shared/reused: clear any stale entries before
    // batching the max-dimension parameter.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
        return rc;
    }

    // Commit the max-dimension hint to the backend so it can select a
    // suitable sensor mode.
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
        return rc;
    }

    // Re-clear the buffer and batch a GET for the raw (sensor) dimension
    // that corresponds to the dimension just committed above.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
        return rc;
    }

    // Copy the returned raw dimension into the caller-provided output.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);

    return rc;
}
969
970/*==============================================================================
971 * FUNCTION   : updatePowerHint
972 *
973 * DESCRIPTION: update power hint based on whether it's video mode or not.
974 *
975 * PARAMETERS :
976 *   @bWasVideo : whether video mode before the switch
977 *   @bIsVideo  : whether new mode is video or not.
978 *
979 * RETURN     : NULL
980 *
981 *==========================================================================*/
982void QCamera3HardwareInterface::updatePowerHint(bool bWasVideo, bool bIsVideo)
983{
984#ifdef HAS_MULTIMEDIA_HINTS
985    if (bWasVideo == bIsVideo)
986        return;
987
988    if (m_pPowerModule && m_pPowerModule->powerHint) {
989        if (bIsVideo)
990            m_pPowerModule->powerHint(m_pPowerModule,
991                    POWER_HINT_VIDEO_ENCODE, (void *)"state=1");
992        else
993            m_pPowerModule->powerHint(m_pPowerModule,
994                    POWER_HINT_VIDEO_ENCODE, (void *)"state=0");
995     }
996#endif
997}
998
999/*===========================================================================
1000 * FUNCTION   : configureStreams
1001 *
1002 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1003 *              and output streams.
1004 *
1005 * PARAMETERS :
1006 *   @stream_list : streams to be configured
1007 *
1008 * RETURN     :
1009 *
1010 *==========================================================================*/
1011int QCamera3HardwareInterface::configureStreams(
1012        camera3_stream_configuration_t *streamList)
1013{
1014    ATRACE_CALL();
1015    int rc = 0;
1016    bool bWasVideo = m_bIsVideo;
1017
1018    // Sanity check stream_list
1019    if (streamList == NULL) {
1020        ALOGE("%s: NULL stream configuration", __func__);
1021        return BAD_VALUE;
1022    }
1023    if (streamList->streams == NULL) {
1024        ALOGE("%s: NULL stream list", __func__);
1025        return BAD_VALUE;
1026    }
1027
1028    if (streamList->num_streams < 1) {
1029        ALOGE("%s: Bad number of streams requested: %d", __func__,
1030                streamList->num_streams);
1031        return BAD_VALUE;
1032    }
1033
1034    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1035        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1036                MAX_NUM_STREAMS, streamList->num_streams);
1037        return BAD_VALUE;
1038    }
1039
1040    mOpMode = streamList->operation_mode;
1041    CDBG("%s: mOpMode: %d", __func__, mOpMode);
1042
    /* first invalidate all the streams in the mStreamList
1044     * if they appear again, they will be validated */
1045    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1046            it != mStreamInfo.end(); it++) {
1047        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1048        channel->stop();
1049        (*it)->status = INVALID;
1050    }
1051
1052    if (mRawDumpChannel) {
1053        mRawDumpChannel->stop();
1054        delete mRawDumpChannel;
1055        mRawDumpChannel = NULL;
1056    }
1057
1058    if (mSupportChannel)
1059        mSupportChannel->stop();
1060
1061    if (mAnalysisChannel) {
1062        mAnalysisChannel->stop();
1063    }
1064    if (mMetadataChannel) {
1065        /* If content of mStreamInfo is not 0, there is metadata stream */
1066        mMetadataChannel->stop();
1067    }
1068
1069    pthread_mutex_lock(&mMutex);
1070
1071    /* Check whether we have video stream */
1072    m_bIs4KVideo = false;
1073    m_bIsVideo = false;
1074    m_bEisSupportedSize = false;
1075    bool isZsl = false;
1076    uint32_t videoWidth = 0U;
1077    uint32_t videoHeight = 0U;
1078    size_t rawStreamCnt = 0;
1079    size_t stallStreamCnt = 0;
1080    size_t processedStreamCnt = 0;
1081    // Number of streams on ISP encoder path
1082    size_t numStreamsOnEncoder = 0;
1083    size_t numYuv888OnEncoder = 0;
1084    bool bYuv888OverrideJpeg = false;
1085    cam_dimension_t largeYuv888Size = {0, 0};
1086    cam_dimension_t maxViewfinderSize = {0, 0};
1087    bool bJpegExceeds4K = false;
1088    bool bUseCommonFeatureMask = false;
1089    uint32_t commonFeatureMask = 0;
1090    //@todo Remove fullFeatureMask and possibly m_bTnrEnabled once CPP checks
1091    //      both feature mask and param for TNR enable.
1092    uint32_t fullFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1093    if (m_bTnrEnabled != 0)
1094    {
1095        fullFeatureMask |= CAM_QCOM_FEATURE_CPP_TNR;
1096    }
1097    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1098    camera3_stream_t *inputStream = NULL;
1099    bool isJpeg = false;
1100    cam_dimension_t jpegSize = {0, 0};
1101
1102    /*EIS configuration*/
1103    bool eisSupported = false;
1104    bool oisSupported = false;
1105    int32_t margin_index = -1;
1106    uint8_t eis_prop_set;
1107    uint32_t maxEisWidth = 0;
1108    uint32_t maxEisHeight = 0;
1109    int32_t hal_version = CAM_HAL_V3;
1110
1111    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1112
1113    size_t count = IS_TYPE_MAX;
1114    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1115    for (size_t i = 0; i < count; i++) {
1116        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1117            eisSupported = true;
1118            margin_index = (int32_t)i;
1119            break;
1120        }
1121    }
1122
1123    count = CAM_OPT_STAB_MAX;
1124    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1125    for (size_t i = 0; i < count; i++) {
1126        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1127            oisSupported = true;
1128            break;
1129        }
1130    }
1131
1132    if (eisSupported) {
1133        maxEisWidth = MAX_EIS_WIDTH;
1134        maxEisHeight = MAX_EIS_HEIGHT;
1135    }
1136
1137    /* EIS setprop control */
1138    char eis_prop[PROPERTY_VALUE_MAX];
1139    memset(eis_prop, 0, sizeof(eis_prop));
1140    property_get("persist.camera.eis.enable", eis_prop, "0");
1141    eis_prop_set = (uint8_t)atoi(eis_prop);
1142
1143    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1144            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1145
1146    /* stream configurations */
1147    for (size_t i = 0; i < streamList->num_streams; i++) {
1148        camera3_stream_t *newStream = streamList->streams[i];
1149        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1150                "height = %d, rotation = %d, usage = 0x%x",
1151                __func__, i, newStream->stream_type, newStream->format,
1152                newStream->width, newStream->height, newStream->rotation,
1153                newStream->usage);
1154        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1155                newStream->stream_type == CAMERA3_STREAM_INPUT){
1156            isZsl = true;
1157        }
1158        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1159            inputStream = newStream;
1160        }
1161
1162        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1163            isJpeg = true;
1164            jpegSize.width = newStream->width;
1165            jpegSize.height = newStream->height;
1166            if (newStream->width > VIDEO_4K_WIDTH ||
1167                    newStream->height > VIDEO_4K_HEIGHT)
1168                bJpegExceeds4K = true;
1169        }
1170
1171        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1172                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1173            m_bIsVideo = true;
1174            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1175                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1176                videoWidth = newStream->width;
1177                videoHeight = newStream->height;
1178                m_bIs4KVideo = true;
1179            }
1180            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1181                                  (newStream->height <= maxEisHeight);
1182        }
1183        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1184                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1185            switch (newStream->format) {
1186            case HAL_PIXEL_FORMAT_BLOB:
1187                stallStreamCnt++;
1188                if (isOnEncoder(maxViewfinderSize, newStream->width,
1189                        newStream->height)) {
1190                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1191                    numStreamsOnEncoder++;
1192                }
1193                break;
1194            case HAL_PIXEL_FORMAT_RAW10:
1195            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1196            case HAL_PIXEL_FORMAT_RAW16:
1197                rawStreamCnt++;
1198                break;
1199            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1200                processedStreamCnt++;
1201                if (isOnEncoder(maxViewfinderSize, newStream->width,
1202                        newStream->height)) {
1203                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1204                            IS_USAGE_ZSL(newStream->usage)) {
1205                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1206                    } else {
1207                        commonFeatureMask |= fullFeatureMask;
1208                    }
1209                    numStreamsOnEncoder++;
1210                }
1211                break;
1212            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1213                processedStreamCnt++;
1214                if (isOnEncoder(maxViewfinderSize, newStream->width,
1215                        newStream->height)) {
1216                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1217                    numStreamsOnEncoder++;
1218                    numYuv888OnEncoder++;
1219                    largeYuv888Size.width = newStream->width;
1220                    largeYuv888Size.height = newStream->height;
1221                }
1222                break;
1223            default:
1224                processedStreamCnt++;
1225                if (isOnEncoder(maxViewfinderSize, newStream->width,
1226                        newStream->height)) {
1227                    commonFeatureMask |= fullFeatureMask;
1228                    numStreamsOnEncoder++;
1229                }
1230                break;
1231            }
1232
1233        }
1234    }
1235
1236    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1237        !m_bIsVideo) {
1238        m_bEisEnable = false;
1239    }
1240
1241    /* Check if num_streams is sane */
1242    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1243            rawStreamCnt > MAX_RAW_STREAMS ||
1244            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1245        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1246                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1247        pthread_mutex_unlock(&mMutex);
1248        return -EINVAL;
1249    }
1250    /* Check whether we have zsl stream or 4k video case */
1251    if (isZsl && m_bIsVideo) {
1252        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1253        pthread_mutex_unlock(&mMutex);
1254        return -EINVAL;
1255    }
1256    /* Check if stream sizes are sane */
1257    if (numStreamsOnEncoder > 2) {
1258        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1259                __func__);
1260        pthread_mutex_unlock(&mMutex);
1261        return -EINVAL;
1262    } else if (1 < numStreamsOnEncoder){
1263        bUseCommonFeatureMask = true;
1264        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1265                __func__);
1266    }
1267
1268    /* Check if BLOB size is greater than 4k in 4k recording case */
1269    if (m_bIs4KVideo && bJpegExceeds4K) {
1270        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1271                __func__);
1272        pthread_mutex_unlock(&mMutex);
1273        return -EINVAL;
1274    }
1275
1276    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1277    // the YUV stream's size is greater or equal to the JPEG size, set common
1278    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1279    if (numYuv888OnEncoder && isJpeg &&
1280            largeYuv888Size.width >= jpegSize.width &&
1281            largeYuv888Size.height >= jpegSize.height) {
1282        bYuv888OverrideJpeg = true;
1283        commonFeatureMask = CAM_QCOM_FEATURE_NONE;
1284    }
1285
1286    rc = validateStreamDimensions(streamList);
1287    if (rc == NO_ERROR) {
1288        rc = validateStreamRotations(streamList);
1289    }
1290    if (rc != NO_ERROR) {
1291        ALOGE("%s: Invalid stream configuration requested!", __func__);
1292        pthread_mutex_unlock(&mMutex);
1293        return rc;
1294    }
1295
1296    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1297    camera3_stream_t *jpegStream = NULL;
1298    for (size_t i = 0; i < streamList->num_streams; i++) {
1299        camera3_stream_t *newStream = streamList->streams[i];
1300        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1301                "stream size : %d x %d, stream rotation = %d",
1302                __func__, newStream->stream_type, newStream->format,
1303                newStream->width, newStream->height, newStream->rotation);
1304        //if the stream is in the mStreamList validate it
1305        bool stream_exists = false;
1306        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1307                it != mStreamInfo.end(); it++) {
1308            if ((*it)->stream == newStream) {
1309                QCamera3ProcessingChannel *channel =
1310                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1311                stream_exists = true;
1312                if (channel)
1313                    delete channel;
1314                (*it)->status = VALID;
1315                (*it)->stream->priv = NULL;
1316                (*it)->channel = NULL;
1317            }
1318        }
1319        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1320            //new stream
1321            stream_info_t* stream_info;
1322            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1323            if (!stream_info) {
1324               ALOGE("%s: Could not allocate stream info", __func__);
1325               rc = -ENOMEM;
1326               pthread_mutex_unlock(&mMutex);
1327               return rc;
1328            }
1329            stream_info->stream = newStream;
1330            stream_info->status = VALID;
1331            stream_info->channel = NULL;
1332            mStreamInfo.push_back(stream_info);
1333        }
1334        /* Covers Opaque ZSL and API1 F/W ZSL */
1335        if (IS_USAGE_ZSL(newStream->usage)
1336                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1337            if (zslStream != NULL) {
1338                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1339                pthread_mutex_unlock(&mMutex);
1340                return BAD_VALUE;
1341            }
1342            zslStream = newStream;
1343        }
1344        /* Covers YUV reprocess */
1345        if (inputStream != NULL) {
1346            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1347                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1348                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1349                    && inputStream->width == newStream->width
1350                    && inputStream->height == newStream->height) {
1351                if (zslStream != NULL) {
1352                    /* This scenario indicates multiple YUV streams with same size
1353                     * as input stream have been requested, since zsl stream handle
1354                     * is solely use for the purpose of overriding the size of streams
1355                     * which share h/w streams we will just make a guess here as to
1356                     * which of the stream is a ZSL stream, this will be refactored
1357                     * once we make generic logic for streams sharing encoder output
1358                     */
1359                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1360                }
1361                zslStream = newStream;
1362            }
1363        }
1364        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1365            jpegStream = newStream;
1366        }
1367    }
1368
1369    /* If a zsl stream is set, we know that we have configured at least one input or
1370       bidirectional stream */
1371    if (NULL != zslStream) {
1372        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1373        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1374        mInputStreamInfo.format = zslStream->format;
1375        mInputStreamInfo.usage = zslStream->usage;
1376        CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1377                __func__, mInputStreamInfo.dim.width,
1378                mInputStreamInfo.dim.height,
1379                mInputStreamInfo.format, mInputStreamInfo.usage);
1380    }
1381
1382    cleanAndSortStreamInfo();
1383    if (mMetadataChannel) {
1384        delete mMetadataChannel;
1385        mMetadataChannel = NULL;
1386    }
1387    if (mSupportChannel) {
1388        delete mSupportChannel;
1389        mSupportChannel = NULL;
1390    }
1391
1392    if (mAnalysisChannel) {
1393        delete mAnalysisChannel;
1394        mAnalysisChannel = NULL;
1395    }
1396
1397    if (mDummyBatchChannel) {
1398        delete mDummyBatchChannel;
1399        mDummyBatchChannel = NULL;
1400    }
1401
1402    //Create metadata channel and initialize it
1403    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1404                    mCameraHandle->ops, captureResultCb,
1405                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1406    if (mMetadataChannel == NULL) {
1407        ALOGE("%s: failed to allocate metadata channel", __func__);
1408        rc = -ENOMEM;
1409        pthread_mutex_unlock(&mMutex);
1410        return rc;
1411    }
1412    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1413    if (rc < 0) {
1414        ALOGE("%s: metadata channel initialization failed", __func__);
1415        delete mMetadataChannel;
1416        mMetadataChannel = NULL;
1417        pthread_mutex_unlock(&mMutex);
1418        return rc;
1419    }
1420
1421    // Create analysis stream all the time, even when h/w support is not available
1422    {
1423        mAnalysisChannel = new QCamera3SupportChannel(
1424                mCameraHandle->camera_handle,
1425                mCameraHandle->ops,
1426                &gCamCapability[mCameraId]->padding_info,
1427                fullFeatureMask,
1428                CAM_STREAM_TYPE_ANALYSIS,
1429                &gCamCapability[mCameraId]->analysis_recommended_res,
1430                gCamCapability[mCameraId]->analysis_recommended_format,
1431                this,
1432                0); // force buffer count to 0
1433        if (!mAnalysisChannel) {
1434            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1435            pthread_mutex_unlock(&mMutex);
1436            return -ENOMEM;
1437        }
1438    }
1439
1440    bool isRawStreamRequested = false;
1441    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1442    /* Allocate channel objects for the requested streams */
1443    for (size_t i = 0; i < streamList->num_streams; i++) {
1444        camera3_stream_t *newStream = streamList->streams[i];
1445        uint32_t stream_usage = newStream->usage;
1446        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1447        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1448        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1449                || IS_USAGE_ZSL(newStream->usage)) &&
1450            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1451            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1452            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1453        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1454                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1455        } else {
1456            //for non zsl streams find out the format
1457            switch (newStream->format) {
1458            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1459              {
1460                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1461                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1462                 } else {
1463                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1464                 }
1465                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1466                         = fullFeatureMask;
1467
1468                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1469                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1470                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1471                             newStream->height;
1472                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1473                             newStream->width;
1474                 }
1475              }
1476              break;
1477           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1478              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1479              if (isOnEncoder(maxViewfinderSize, newStream->width,
1480                      newStream->height)) {
1481                  if (bUseCommonFeatureMask)
1482                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1483                              commonFeatureMask;
1484                  else
1485                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1486                              CAM_QCOM_FEATURE_NONE;
1487              } else {
1488                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1489                          fullFeatureMask;
1490              }
1491              break;
1492           case HAL_PIXEL_FORMAT_BLOB:
1493              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1494              if (m_bIs4KVideo && !isZsl) {
1495                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1496                          = fullFeatureMask;
1497              } else {
1498                  if (bUseCommonFeatureMask &&
1499                          isOnEncoder(maxViewfinderSize, newStream->width,
1500                                  newStream->height)) {
1501                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1502                  } else {
1503                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1504                  }
1505              }
1506              if (isZsl) {
1507                  if (zslStream) {
1508                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1509                              (int32_t)zslStream->width;
1510                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1511                              (int32_t)zslStream->height;
1512                  } else {
1513                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1514                      pthread_mutex_unlock(&mMutex);
1515                      return -EINVAL;
1516                  }
1517              } else if (m_bIs4KVideo) {
1518                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1519                          (int32_t)videoWidth;
1520                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1521                          (int32_t)videoHeight;
1522              } else if (bYuv888OverrideJpeg) {
1523                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1524                          (int32_t)largeYuv888Size.width;
1525                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1526                          (int32_t)largeYuv888Size.height;
1527              }
1528              break;
1529           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1530           case HAL_PIXEL_FORMAT_RAW16:
1531           case HAL_PIXEL_FORMAT_RAW10:
1532              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1533              isRawStreamRequested = true;
1534              break;
1535           default:
1536              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1537              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1538              break;
1539            }
1540
1541        }
1542
1543        if (newStream->priv == NULL) {
1544            //New stream, construct channel
1545            switch (newStream->stream_type) {
1546            case CAMERA3_STREAM_INPUT:
1547                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1548                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1549                break;
1550            case CAMERA3_STREAM_BIDIRECTIONAL:
1551                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1552                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1553                break;
1554            case CAMERA3_STREAM_OUTPUT:
1555                /* For video encoding stream, set read/write rarely
1556                 * flag so that they may be set to un-cached */
1557                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1558                    newStream->usage |=
1559                         (GRALLOC_USAGE_SW_READ_RARELY |
1560                         GRALLOC_USAGE_SW_WRITE_RARELY |
1561                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1562                else if (IS_USAGE_ZSL(newStream->usage))
1563                    CDBG("%s: ZSL usage flag skipping", __func__);
1564                else if (newStream == zslStream
1565                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1566                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1567                } else
1568                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1569                break;
1570            default:
1571                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1572                break;
1573            }
1574
1575            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1576                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1577                QCamera3ProcessingChannel *channel = NULL;
1578                switch (newStream->format) {
1579                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1580                    if ((newStream->usage &
1581                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1582                            (streamList->operation_mode ==
1583                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1584                    ) {
1585                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1586                                mCameraHandle->ops, captureResultCb,
1587                                &gCamCapability[mCameraId]->padding_info,
1588                                this,
1589                                newStream,
1590                                (cam_stream_type_t)
1591                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1592                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1593                                mMetadataChannel,
1594                                0); //heap buffers are not required for HFR video channel
1595                        if (channel == NULL) {
1596                            ALOGE("%s: allocation of channel failed", __func__);
1597                            pthread_mutex_unlock(&mMutex);
1598                            return -ENOMEM;
1599                        }
1600                        //channel->getNumBuffers() will return 0 here so use
1601                        //MAX_INFLIGH_HFR_REQUESTS
1602                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1603                        newStream->priv = channel;
1604                        ALOGI("%s: num video buffers in HFR mode: %d",
1605                                __func__, MAX_INFLIGHT_HFR_REQUESTS);
1606                    } else {
1607                        /* Copy stream contents in HFR preview only case to create
1608                         * dummy batch channel so that sensor streaming is in
1609                         * HFR mode */
1610                        if (!m_bIsVideo && (streamList->operation_mode ==
1611                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1612                            mDummyBatchStream = *newStream;
1613                        }
1614                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1615                                mCameraHandle->ops, captureResultCb,
1616                                &gCamCapability[mCameraId]->padding_info,
1617                                this,
1618                                newStream,
1619                                (cam_stream_type_t)
1620                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1621                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1622                                mMetadataChannel,
1623                                MAX_INFLIGHT_REQUESTS);
1624                        if (channel == NULL) {
1625                            ALOGE("%s: allocation of channel failed", __func__);
1626                            pthread_mutex_unlock(&mMutex);
1627                            return -ENOMEM;
1628                        }
1629                        newStream->max_buffers = channel->getNumBuffers();
1630                        newStream->priv = channel;
1631                    }
1632                    break;
1633                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1634                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1635                            mCameraHandle->ops, captureResultCb,
1636                            &gCamCapability[mCameraId]->padding_info,
1637                            this,
1638                            newStream,
1639                            (cam_stream_type_t)
1640                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1641                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1642                            mMetadataChannel);
1643                    if (channel == NULL) {
1644                        ALOGE("%s: allocation of YUV channel failed", __func__);
1645                        pthread_mutex_unlock(&mMutex);
1646                        return -ENOMEM;
1647                    }
1648                    newStream->max_buffers = channel->getNumBuffers();
1649                    newStream->priv = channel;
1650                    break;
1651                }
1652                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1653                case HAL_PIXEL_FORMAT_RAW16:
1654                case HAL_PIXEL_FORMAT_RAW10:
1655                    mRawChannel = new QCamera3RawChannel(
1656                            mCameraHandle->camera_handle,
1657                            mCameraHandle->ops, captureResultCb,
1658                            &gCamCapability[mCameraId]->padding_info,
1659                            this, newStream, CAM_QCOM_FEATURE_NONE,
1660                            mMetadataChannel,
1661                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1662                    if (mRawChannel == NULL) {
1663                        ALOGE("%s: allocation of raw channel failed", __func__);
1664                        pthread_mutex_unlock(&mMutex);
1665                        return -ENOMEM;
1666                    }
1667                    newStream->max_buffers = mRawChannel->getNumBuffers();
1668                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1669                    break;
1670                case HAL_PIXEL_FORMAT_BLOB:
1671                    // Max live snapshot inflight buffer is 1. This is to mitigate
1672                    // frame drop issues for video snapshot. The more buffers being
1673                    // allocated, the more frame drops there are.
1674                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
1675                            mCameraHandle->ops, captureResultCb,
1676                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1677                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1678                            m_bIs4KVideo, isZsl, mMetadataChannel,
1679                            (m_bIsVideo ? 1 : MAX_INFLIGHT_REQUESTS));
1680                    if (mPictureChannel == NULL) {
1681                        ALOGE("%s: allocation of channel failed", __func__);
1682                        pthread_mutex_unlock(&mMutex);
1683                        return -ENOMEM;
1684                    }
1685                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1686                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1687                    mPictureChannel->overrideYuvSize(
1688                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1689                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1690                    break;
1691
1692                default:
1693                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1694                    break;
1695                }
1696            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1697                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1698            } else {
1699                ALOGE("%s: Error, Unknown stream type", __func__);
1700                return -EINVAL;
1701            }
1702
1703            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1704                    it != mStreamInfo.end(); it++) {
1705                if ((*it)->stream == newStream) {
1706                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1707                    break;
1708                }
1709            }
1710        } else {
1711            // Channel already exists for this stream
1712            // Do nothing for now
1713        }
1714
1715    /* Do not add entries for input stream in metastream info
1716         * since there is no real stream associated with it
1717         */
1718        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1719            mStreamConfigInfo.num_streams++;
1720    }
1721
1722    //RAW DUMP channel
1723    if (mEnableRawDump && isRawStreamRequested == false){
1724        cam_dimension_t rawDumpSize;
1725        rawDumpSize = getMaxRawSize(mCameraId);
1726        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1727                                  mCameraHandle->ops,
1728                                  rawDumpSize,
1729                                  &gCamCapability[mCameraId]->padding_info,
1730                                  this, CAM_QCOM_FEATURE_NONE);
1731        if (!mRawDumpChannel) {
1732            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1733            pthread_mutex_unlock(&mMutex);
1734            return -ENOMEM;
1735        }
1736    }
1737
1738
1739    if (mAnalysisChannel) {
1740        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1741                gCamCapability[mCameraId]->analysis_recommended_res;
1742        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1743                CAM_STREAM_TYPE_ANALYSIS;
1744        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1745                CAM_QCOM_FEATURE_FACE_DETECTION;
1746        mStreamConfigInfo.num_streams++;
1747    }
1748
1749    if (isSupportChannelNeeded(streamList, mStreamConfigInfo, fullFeatureMask)) {
1750        mSupportChannel = new QCamera3SupportChannel(
1751                mCameraHandle->camera_handle,
1752                mCameraHandle->ops,
1753                &gCamCapability[mCameraId]->padding_info,
1754                fullFeatureMask,
1755                CAM_STREAM_TYPE_CALLBACK,
1756                &QCamera3SupportChannel::kDim,
1757                CAM_FORMAT_YUV_420_NV21,
1758                this);
1759        if (!mSupportChannel) {
1760            ALOGE("%s: dummy channel cannot be created", __func__);
1761            pthread_mutex_unlock(&mMutex);
1762            return -ENOMEM;
1763        }
1764    }
1765
1766    if (mSupportChannel) {
1767        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1768                QCamera3SupportChannel::kDim;
1769        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1770                CAM_STREAM_TYPE_CALLBACK;
1771        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1772                fullFeatureMask;
1773        mStreamConfigInfo.num_streams++;
1774    }
1775
1776    if (mRawDumpChannel) {
1777        cam_dimension_t rawSize;
1778        rawSize = getMaxRawSize(mCameraId);
1779        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1780                rawSize;
1781        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1782                CAM_STREAM_TYPE_RAW;
1783        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1784                CAM_QCOM_FEATURE_NONE;
1785        mStreamConfigInfo.num_streams++;
1786    }
1787    /* In HFR mode, if video stream is not added, create a dummy channel so that
1788     * ISP can create a batch mode even for preview only case. This channel is
1789     * never 'start'ed (no stream-on), it is only 'initialized'  */
1790    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1791            !m_bIsVideo) {
1792        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1793                mCameraHandle->ops, captureResultCb,
1794                &gCamCapability[mCameraId]->padding_info,
1795                this,
1796                &mDummyBatchStream,
1797                CAM_STREAM_TYPE_VIDEO,
1798                fullFeatureMask,
1799                mMetadataChannel);
1800        if (NULL == mDummyBatchChannel) {
1801            ALOGE("%s: creation of mDummyBatchChannel failed."
1802                    "Preview will use non-hfr sensor mode ", __func__);
1803        }
1804    }
1805    if (mDummyBatchChannel) {
1806        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1807                mDummyBatchStream.width;
1808        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1809                mDummyBatchStream.height;
1810        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1811                CAM_STREAM_TYPE_VIDEO;
1812        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1813                fullFeatureMask;
1814        mStreamConfigInfo.num_streams++;
1815    }
1816
1817    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1818    mStreamConfigInfo.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
1819
1820    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1821    for (pendingRequestIterator i = mPendingRequestsList.begin();
1822            i != mPendingRequestsList.end();) {
1823        i = erasePendingRequest(i);
1824    }
1825    mPendingFrameDropList.clear();
1826    // Initialize/Reset the pending buffers list
1827    mPendingBuffersMap.num_buffers = 0;
1828    mPendingBuffersMap.mPendingBufferList.clear();
1829    mPendingReprocessResultList.clear();
1830
1831    mFirstRequest = true;
1832    mCurJpegMeta.clear();
1833    //Get min frame duration for this streams configuration
1834    deriveMinFrameDuration();
1835
1836    /* Turn on video hint only if video stream is configured */
1837    updatePowerHint(bWasVideo, m_bIsVideo);
1838
1839    pthread_mutex_unlock(&mMutex);
1840    return rc;
1841}
1842
1843/*===========================================================================
1844 * FUNCTION   : validateCaptureRequest
1845 *
1846 * DESCRIPTION: validate a capture request from camera service
1847 *
1848 * PARAMETERS :
1849 *   @request : request from framework to process
1850 *
1851 * RETURN     :
1852 *
1853 *==========================================================================*/
1854int QCamera3HardwareInterface::validateCaptureRequest(
1855                    camera3_capture_request_t *request)
1856{
1857    ssize_t idx = 0;
1858    const camera3_stream_buffer_t *b;
1859    CameraMetadata meta;
1860
1861    /* Sanity check the request */
1862    if (request == NULL) {
1863        ALOGE("%s: NULL capture request", __func__);
1864        return BAD_VALUE;
1865    }
1866
1867    if (request->settings == NULL && mFirstRequest) {
1868        /*settings cannot be null for the first request*/
1869        return BAD_VALUE;
1870    }
1871
1872    uint32_t frameNumber = request->frame_number;
1873    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1874        ALOGE("%s: Request %d: No output buffers provided!",
1875                __FUNCTION__, frameNumber);
1876        return BAD_VALUE;
1877    }
1878    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
1879        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
1880                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
1881        return BAD_VALUE;
1882    }
1883    if (request->input_buffer != NULL) {
1884        b = request->input_buffer;
1885        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1886            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1887                    __func__, frameNumber, (long)idx);
1888            return BAD_VALUE;
1889        }
1890        if (b->release_fence != -1) {
1891            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1892                    __func__, frameNumber, (long)idx);
1893            return BAD_VALUE;
1894        }
1895        if (b->buffer == NULL) {
1896            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1897                    __func__, frameNumber, (long)idx);
1898            return BAD_VALUE;
1899        }
1900    }
1901
1902    // Validate all buffers
1903    b = request->output_buffers;
1904    do {
1905        QCamera3ProcessingChannel *channel =
1906                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
1907        if (channel == NULL) {
1908            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
1909                    __func__, frameNumber, (long)idx);
1910            return BAD_VALUE;
1911        }
1912        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1913            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
1914                    __func__, frameNumber, (long)idx);
1915            return BAD_VALUE;
1916        }
1917        if (b->release_fence != -1) {
1918            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
1919                    __func__, frameNumber, (long)idx);
1920            return BAD_VALUE;
1921        }
1922        if (b->buffer == NULL) {
1923            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
1924                    __func__, frameNumber, (long)idx);
1925            return BAD_VALUE;
1926        }
1927        if (*(b->buffer) == NULL) {
1928            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
1929                    __func__, frameNumber, (long)idx);
1930            return BAD_VALUE;
1931        }
1932        idx++;
1933        b = request->output_buffers + idx;
1934    } while (idx < (ssize_t)request->num_output_buffers);
1935
1936    return NO_ERROR;
1937}
1938
1939/*===========================================================================
1940 * FUNCTION   : deriveMinFrameDuration
1941 *
1942 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
1943 *              on currently configured streams.
1944 *
1945 * PARAMETERS : NONE
1946 *
1947 * RETURN     : NONE
1948 *
1949 *==========================================================================*/
1950void QCamera3HardwareInterface::deriveMinFrameDuration()
1951{
1952    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
1953
1954    maxJpegDim = 0;
1955    maxProcessedDim = 0;
1956    maxRawDim = 0;
1957
1958    // Figure out maximum jpeg, processed, and raw dimensions
1959    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1960        it != mStreamInfo.end(); it++) {
1961
1962        // Input stream doesn't have valid stream_type
1963        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
1964            continue;
1965
1966        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
1967        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
1968            if (dimension > maxJpegDim)
1969                maxJpegDim = dimension;
1970        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
1971                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
1972                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
1973            if (dimension > maxRawDim)
1974                maxRawDim = dimension;
1975        } else {
1976            if (dimension > maxProcessedDim)
1977                maxProcessedDim = dimension;
1978        }
1979    }
1980
1981    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
1982            MAX_SIZES_CNT);
1983
1984    //Assume all jpeg dimensions are in processed dimensions.
1985    if (maxJpegDim > maxProcessedDim)
1986        maxProcessedDim = maxJpegDim;
1987    //Find the smallest raw dimension that is greater or equal to jpeg dimension
1988    if (maxProcessedDim > maxRawDim) {
1989        maxRawDim = INT32_MAX;
1990
1991        for (size_t i = 0; i < count; i++) {
1992            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
1993                    gCamCapability[mCameraId]->raw_dim[i].height;
1994            if (dimension >= maxProcessedDim && dimension < maxRawDim)
1995                maxRawDim = dimension;
1996        }
1997    }
1998
1999    //Find minimum durations for processed, jpeg, and raw
2000    for (size_t i = 0; i < count; i++) {
2001        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2002                gCamCapability[mCameraId]->raw_dim[i].height) {
2003            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2004            break;
2005        }
2006    }
2007    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2008    for (size_t i = 0; i < count; i++) {
2009        if (maxProcessedDim ==
2010                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2011                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2012            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2013            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2014            break;
2015        }
2016    }
2017}
2018
2019/*===========================================================================
2020 * FUNCTION   : getMinFrameDuration
2021 *
2022 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
2023 *              and current request configuration.
2024 *
2025 * PARAMETERS : @request: requset sent by the frameworks
2026 *
2027 * RETURN     : min farme duration for a particular request
2028 *
2029 *==========================================================================*/
2030int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2031{
2032    bool hasJpegStream = false;
2033    bool hasRawStream = false;
2034    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2035        const camera3_stream_t *stream = request->output_buffers[i].stream;
2036        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2037            hasJpegStream = true;
2038        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2039                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2040                stream->format == HAL_PIXEL_FORMAT_RAW16)
2041            hasRawStream = true;
2042    }
2043
2044    if (!hasJpegStream)
2045        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2046    else
2047        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2048}
2049
2050/*===========================================================================
2051 * FUNCTION   : handlePendingReprocResults
2052 *
2053 * DESCRIPTION: check and notify on any pending reprocess results
2054 *
2055 * PARAMETERS :
2056 *   @frame_number   : Pending request frame number
2057 *
2058 * RETURN     : int32_t type of status
2059 *              NO_ERROR  -- success
2060 *              none-zero failure code
2061 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the pending reprocess results for an entry matching this frame.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the shutter/notify message that was held back until now.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its settings and input
            // buffer can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Build and send the complete capture result: one output
                    // buffer (the reprocessed one stashed in j) plus the
                    // original input buffer and settings from the request.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    // j->buffer stays valid here: j is only erased after this
                    // inner loop completes.
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Remove the satisfied request; iterator k is invalid
                    // after this, so break immediately.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Entry fully handled; drop it and stop scanning (iterator j is
            // invalid after erase).
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // Always reports success; absence of a matching entry is not an error.
    return NO_ERROR;
}
2100
2101/*===========================================================================
2102 * FUNCTION   : handleBatchMetadata
2103 *
2104 * DESCRIPTION: Handles metadata buffer callback in batch mode
2105 *
2106 * PARAMETERS : @metadata_buf: metadata buffer
2107 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2108 *                 the meta buf in this method
2109 *
2110 * RETURN     :
2111 *
2112 *==========================================================================*/
2113void QCamera3HardwareInterface::handleBatchMetadata(
2114        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2115{
2116    ATRACE_CALL();
2117
2118    if (NULL == metadata_buf) {
2119        ALOGE("%s: metadata_buf is NULL", __func__);
2120        return;
2121    }
2122    /* In batch mode, the metdata will contain the frame number and timestamp of
2123     * the last frame in the batch. Eg: a batch containing buffers from request
2124     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2125     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2126     * multiple process_capture_results */
2127    metadata_buffer_t *metadata =
2128            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2129    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2130    uint32_t last_frame_number, last_urgent_frame_number;
2131    uint32_t frame_number, urgent_frame_number = 0;
2132    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2133    bool invalid_metadata = false;
2134    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2135    size_t loopCount = 1;
2136
2137    int32_t *p_frame_number_valid =
2138            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2139    uint32_t *p_frame_number =
2140            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2141    int64_t *p_capture_time =
2142            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2143    int32_t *p_urgent_frame_number_valid =
2144            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2145    uint32_t *p_urgent_frame_number =
2146            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2147
2148    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2149            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2150            (NULL == p_urgent_frame_number)) {
2151        ALOGE("%s: Invalid metadata", __func__);
2152        invalid_metadata = true;
2153    } else {
2154        frame_number_valid = *p_frame_number_valid;
2155        last_frame_number = *p_frame_number;
2156        last_frame_capture_time = *p_capture_time;
2157        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2158        last_urgent_frame_number = *p_urgent_frame_number;
2159    }
2160
2161    // If reported capture_time is 0, skip handling this metadata
2162    if (!last_frame_capture_time) {
2163        goto done_batch_metadata;
2164    }
2165    /* In batchmode, when no video buffers are requested, set_parms are sent
2166     * for every capture_request. The difference between consecutive urgent
2167     * frame numbers and frame numbers should be used to interpolate the
2168     * corresponding frame numbers and time stamps */
2169    if (urgent_frame_number_valid) {
2170        /* Frame numbers start with 0, handle it in the else condition */
2171        if (last_urgent_frame_number &&
2172                (last_urgent_frame_number >= mPrevUrgentFrameNumber)) {
2173            urgentFrameNumDiff = last_urgent_frame_number - mPrevUrgentFrameNumber;
2174        } else {
2175            urgentFrameNumDiff = 1;
2176        }
2177        mPrevUrgentFrameNumber = last_urgent_frame_number;
2178    }
2179    if (frame_number_valid) {
2180        /* Frame numbers start with 0, handle it in the else condition */
2181        if(last_frame_number && (last_frame_number >= mPrevFrameNumber)) {
2182            frameNumDiff = last_frame_number - mPrevFrameNumber;
2183        } else {
2184            frameNumDiff = 1;
2185        }
2186        mPrevFrameNumber = last_frame_number;
2187    }
2188    if (urgent_frame_number_valid || frame_number_valid) {
2189        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2190        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2191            ALOGE("%s: urgentFrameNumDiff: %d", __func__, urgentFrameNumDiff);
2192        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2193            ALOGE("%s: frameNumDiff: %d", __func__, frameNumDiff);
2194
2195    }
2196
2197    CDBG("%s: urgent_frm: valid: %d frm_num: %d previous frm_num: %d",
2198            __func__, urgent_frame_number_valid, last_urgent_frame_number,
2199            mPrevUrgentFrameNumber);
2200    CDBG("%s:        frm: valid: %d frm_num: %d previous frm_num:: %d",
2201            __func__, frame_number_valid, last_frame_number, mPrevFrameNumber);
2202
2203    //TODO: Need to ensure, metadata is not posted with the same frame numbers
2204    //when urgentFrameNumDiff != frameNumDiff
2205    for (size_t i = 0; i < loopCount; i++) {
2206        /* handleMetadataWithLock is called even for invalid_metadata for
2207         * pipeline depth calculation */
2208        if (!invalid_metadata) {
2209            /* Infer frame number. Batch metadata contains frame number of the
2210             * last frame */
2211            if (urgent_frame_number_valid) {
2212                if (i < urgentFrameNumDiff) {
2213                    urgent_frame_number =
2214                            last_urgent_frame_number + 1 - urgentFrameNumDiff + i;
2215                    CDBG("%s: inferred urgent frame_number: %d",
2216                            __func__, urgent_frame_number);
2217                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2218                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2219                } else {
2220                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2221                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2222                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2223                }
2224            }
2225
2226            /* Infer frame number. Batch metadata contains frame number of the
2227             * last frame */
2228            if (frame_number_valid) {
2229                if (i < frameNumDiff) {
2230                    frame_number = last_frame_number + 1 - frameNumDiff + i;
2231                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2232                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2233                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2234                } else {
2235                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2236                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2237                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2238                }
2239            }
2240
2241            //Infer timestamp
2242            first_frame_capture_time = last_frame_capture_time -
2243                    (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2244            capture_time =
2245                    first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2246            ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2247                    CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2248            CDBG("%s: batch capture_time: %lld, capture_time: %lld",
2249                    __func__, last_frame_capture_time, capture_time);
2250        }
2251        pthread_mutex_lock(&mMutex);
2252        handleMetadataWithLock(metadata_buf,
2253                false /* free_and_bufdone_meta_buf */);
2254        pthread_mutex_unlock(&mMutex);
2255    }
2256
2257done_batch_metadata:
2258    /* BufDone metadata buffer */
2259    if (free_and_bufdone_meta_buf) {
2260        mMetadataChannel->bufDone(metadata_buf);
2261        free(metadata_buf);
2262    }
2263}
2264
2265/*===========================================================================
2266 * FUNCTION   : handleMetadataWithLock
2267 *
2268 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2269 *
2270 * PARAMETERS : @metadata_buf: metadata buffer
2271 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2272 *                 the meta buf in this method
2273 *
2274 * RETURN     :
2275 *
2276 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    // Caller holds mMutex (see function header). The first buffer of the
    // super-buf carries the backend metadata blob.
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;

    // Raw pointers into the metadata buffer; each may be NULL if the backend
    // did not populate the corresponding tag.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // IF_META_AVAILABLE declares p_cam_frame_drop; it remains in scope and is
    // consulted below when reporting ERROR_BUFFER for dropped stream buffers.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    // If any mandatory tag is missing, release the metadata buffer (when we
    // own it) and bail out; only the pipeline-depth bookkeeping at
    // done_metadata still runs.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // An older non-input request with no partial result yet means its
            // urgent metadata never arrived — log only, no recovery here.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
            }

            // Deliver the 3A metadata exactly once per request as a partial
            // result; bUrgentReceived guards against duplicates.
            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                // result.result was allocated by the translate call above;
                // the framework consumed it synchronously, so free it now.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // No valid normal frame number: this metadata only marked start-of-frame.
    // Release the buffer (if owned) and skip the result-dispatch loop.
    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Walk pending requests in order and complete every entry whose frame
    // number is <= the one this metadata reports. Entries are erased as they
    // are completed (erasePendingRequest advances the iterator).
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
                   QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                   // Match this request's stream against the backend's list of
                   // dropped stream IDs for the frame.
                   for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                       if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                           ALOGW("%s: Start of reporting error frame#=%u, streamID=%u",
                                   __func__, i->frame_number, streamID);
                           notify_msg.type = CAMERA3_MSG_ERROR;
                           notify_msg.message.error.frame_number = i->frame_number;
                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                           notify_msg.message.error.error_stream = j->stream;
                           mCallbackOps->notify(mCallbackOps, &notify_msg);
                           ALOGW("%s: End of reporting error frame#=%u, streamID=%u",
                                  __func__, i->frame_number, streamID);
                           // Remember the drop so the buffer is later returned
                           // with CAMERA3_BUFFER_STATUS_ERROR (see below and
                           // handleBufferWithLock).
                           PendingFrameDropInfo PendingFrameDrop;
                           PendingFrameDrop.frame_number=i->frame_number;
                           PendingFrameDrop.stream_ID = streamID;
                           // Add the Frame drop info to mPendingFrameDropList
                           mPendingFrameDropList.push_back(PendingFrameDrop);
                      }
                   }
               } else {
                   ALOGE("%s: JPEG buffer dropped for frame number %d",
                           __func__, i->frame_number);
               }
            }
        }

        //TODO: batch handling for dropped metadata

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            // Interpolate a shutter timestamp for the missed frame by stepping
            // back ~33ms per frame of distance from the urgent frame number.
            // NOTE(review): assumes ~30fps frame spacing — confirm for HFR.
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time -
                        (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
            if (i->input_buffer) {
                i->partial_result_cnt++; //input request will not have urgent metadata
                CameraMetadata settings;
                if(i->settings) {
                    settings = i->settings;
                    // Prefer the timestamp embedded in the input settings, if any.
                    if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                        nsecs_t input_capture_time =
                                settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                        notify_msg.message.shutter.timestamp = (uint64_t)input_capture_time;
                    } else {
                        ALOGE("%s: No timestamp in input settings! Using current one.",
                                __func__);
                    }
                } else {
                    ALOGE("%s: Input settings missing!", __func__);
                }
                // Hand the (possibly empty) settings back as this request's result.
                result.result = settings.release();
                result.partial_result = i->partial_result_cnt;
                CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            } else {
                // Live request whose metadata was missed: complete it with a
                // minimal dummy result (timestamp + request id only).
                mPendingLiveRequest--;
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &i->timestamp, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            }
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->timestamp = (nsecs_t)notify_msg.message.shutter.timestamp;
            CDBG("%s: Support notification !!!! notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        } else {
            // i->frame_number == frame_number: this is the request the
            // metadata actually belongs to.
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Ownership of metadata_buf passes to the channel; the
                    // !internalPproc branch below therefore must not free it.
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            // Metadata buffer not consumed by internal postprocess: return it
            // to the channel now (only if this function owns it).
            if (!internalPproc) {
                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the stream buffers already cached for this request by
        // handleBufferWithLock; they are delivered together with the metadata.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): plain operator new[] throws on failure rather than
            // returning NULL, so this check can never fire; std::nothrow (or
            // handling bad_alloc) would be needed for it to be meaningful.
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer as ERROR if its stream/frame was recorded
                    // in mPendingFrameDropList, consuming the record.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove the buffer from the global pending-buffers map.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        CDBG("%s: Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    // j->buffer was malloc'd by handleBufferWithLock; copy it
                    // into the result array and release the cached copy.
                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                    __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // Metadata-only result: no buffers have arrived for this request yet.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                        __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        i = erasePendingRequest(i);

        // A completed request may unblock cached reprocess results waiting on
        // ordering (see handleBufferWithLock's notifyNow logic).
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every request still pending has advanced one more stage in the pipeline.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
    // Wake processCaptureRequest if it is waiting for in-flight requests to drain.
    unblockRequestIfNecessary();

}
2614
2615/*===========================================================================
2616 * FUNCTION   : handleBufferWithLock
2617 *
2618 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2619 *
2620 * PARAMETERS : @buffer: image buffer for the callback
2621 *              @frame_number: frame number of the image buffer
2622 *
2623 * RETURN     :
2624 *
2625 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // Caller holds mMutex (see function header).
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Case 1: metadata for this frame was already delivered and the
        // request erased — return the buffer to the framework immediately.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // partial_result = 0: this result carries only a buffer, no metadata.
        result.partial_result = 0;
        // If this stream/frame was recorded as dropped, flag the buffer as
        // ERROR and consume the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // The buffer is leaving the HAL: remove it from the pending-buffers map.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Case 2: reprocess request (has an input buffer). Build shutter
            // notify + full result here, since no live metadata will arrive.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Fallback timestamp if the input settings carry none.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait for and close the input buffer's release fence before
            // declaring the reprocess complete.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
               }
            }

            // The output buffer is leaving the HAL: drop it from the
            // pending-buffers map.
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

            // Results must be delivered in frame-number order: only notify now
            // if no older request is still pending.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                // NOTE(review): the raw request settings are handed back as
                // the result metadata — confirm the framework accepts this
                // for reprocess requests.
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (flushed by handlePendingReprocResults once older frames
                // complete — see handleMetadataWithLock).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Case 3: normal request still waiting for metadata — cache a
            // heap copy of the buffer on the request; it is delivered (and
            // freed) by handleMetadataWithLock together with the metadata.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
2780
2781/*===========================================================================
2782 * FUNCTION   : unblockRequestIfNecessary
2783 *
2784 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2785 *              that mMutex is held when this function is called.
2786 *
2787 * PARAMETERS :
2788 *
2789 * RETURN     :
2790 *
2791 *==========================================================================*/
2792void QCamera3HardwareInterface::unblockRequestIfNecessary()
2793{
2794   // Unblock process_capture_request
2795   pthread_cond_signal(&mRequestCond);
2796}
2797
2798
2799/*===========================================================================
2800 * FUNCTION   : processCaptureRequest
2801 *
2802 * DESCRIPTION: process a capture request from camera service
2803 *
2804 * PARAMETERS :
2805 *   @request : request from framework to process
2806 *
2807 * RETURN     :
2808 *
2809 *==========================================================================*/
2810int QCamera3HardwareInterface::processCaptureRequest(
2811                    camera3_capture_request_t *request)
2812{
2813    ATRACE_CALL();
2814    int rc = NO_ERROR;
2815    int32_t request_id;
2816    CameraMetadata meta;
2817    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
2818    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
2819    bool isVidBufRequested = false;
2820    camera3_stream_buffer_t *pInputBuffer = NULL;
2821
2822    pthread_mutex_lock(&mMutex);
2823
2824    rc = validateCaptureRequest(request);
2825    if (rc != NO_ERROR) {
2826        ALOGE("%s: incoming request is not valid", __func__);
2827        pthread_mutex_unlock(&mMutex);
2828        return rc;
2829    }
2830
2831    meta = request->settings;
2832
2833    // For first capture request, send capture intent, and
2834    // stream on all streams
2835    if (mFirstRequest) {
2836        // send an unconfigure to the backend so that the isp
2837        // resources are deallocated
2838        if (!mFirstConfiguration) {
2839            cam_stream_size_info_t stream_config_info;
2840            int32_t hal_version = CAM_HAL_V3;
2841            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
2842            stream_config_info.buffer_info.min_buffers =
2843                    MIN_INFLIGHT_REQUESTS;
2844            stream_config_info.buffer_info.max_buffers =
2845                    MAX_INFLIGHT_REQUESTS;
2846            clear_metadata_buffer(mParameters);
2847            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2848                    CAM_INTF_PARM_HAL_VERSION, hal_version);
2849            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2850                    CAM_INTF_META_STREAM_INFO, stream_config_info);
2851            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2852                    mParameters);
2853            if (rc < 0) {
2854                ALOGE("%s: set_parms for unconfigure failed", __func__);
2855                pthread_mutex_unlock(&mMutex);
2856                return rc;
2857            }
2858        }
2859
2860        /* get eis information for stream configuration */
2861        cam_is_type_t is_type;
2862        char is_type_value[PROPERTY_VALUE_MAX];
2863        property_get("persist.camera.is_type", is_type_value, "0");
2864        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
2865
2866        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
2867            int32_t hal_version = CAM_HAL_V3;
2868            uint8_t captureIntent =
2869                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
2870            mCaptureIntent = captureIntent;
2871            clear_metadata_buffer(mParameters);
2872            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
2873            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
2874        }
2875
2876        //If EIS is enabled, turn it on for video
2877        bool setEis = m_bEisEnable && m_bEisSupportedSize;
2878        int32_t vsMode;
2879        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
2880        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
2881            rc = BAD_VALUE;
2882        }
2883
2884        //IS type will be 0 unless EIS is supported. If EIS is supported
2885        //it could either be 1 or 4 depending on the stream and video size
2886        if (setEis) {
2887            if (!m_bEisSupportedSize) {
2888                is_type = IS_TYPE_DIS;
2889            } else {
2890                is_type = IS_TYPE_EIS_2_0;
2891            }
2892            mStreamConfigInfo.is_type = is_type;
2893        } else {
2894            mStreamConfigInfo.is_type = IS_TYPE_NONE;
2895        }
2896
2897        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2898                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
2899        int32_t tintless_value = 1;
2900        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2901                CAM_INTF_PARM_TINTLESS, tintless_value);
2902        //Disable CDS for HFR mode and if mPprocBypass = true.
2903        //CDS is a session parameter in the backend/ISP, so need to be set/reset
2904        //after every configure_stream
2905        if(CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
2906            int32_t cds = CAM_CDS_MODE_OFF;
2907            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
2908                    CAM_INTF_PARM_CDS_MODE, cds))
2909                ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
2910
2911        }
2912        setMobicat();
2913
2914        /* Set fps and hfr mode while sending meta stream info so that sensor
2915         * can configure appropriate streaming mode */
2916        mHFRVideoFps = DEFAULT_VIDEO_FPS;
2917        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
2918            rc = setHalFpsRange(meta, mParameters);
2919            if (rc != NO_ERROR) {
2920                ALOGE("%s: setHalFpsRange failed", __func__);
2921            }
2922        }
2923        if (meta.exists(ANDROID_CONTROL_MODE)) {
2924            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
2925            rc = extractSceneMode(meta, metaMode, mParameters);
2926            if (rc != NO_ERROR) {
2927                ALOGE("%s: extractSceneMode failed", __func__);
2928            }
2929        }
2930
2931        //TODO: validate the arguments, HSV scenemode should have only the
2932        //advertised fps ranges
2933
2934        /*set the capture intent, hal version, tintless, stream info,
2935         *and disenable parameters to the backend*/
2936        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
2937        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
2938                    mParameters);
2939
2940        cam_dimension_t sensor_dim;
2941        memset(&sensor_dim, 0, sizeof(sensor_dim));
2942        rc = getSensorOutputSize(sensor_dim);
2943        if (rc != NO_ERROR) {
2944            ALOGE("%s: Failed to get sensor output size", __func__);
2945            pthread_mutex_unlock(&mMutex);
2946            return rc;
2947        }
2948
2949        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
2950                gCamCapability[mCameraId]->active_array_size.height,
2951                sensor_dim.width, sensor_dim.height);
2952
2953        /* Set batchmode before initializing channel. Since registerBuffer
2954         * internally initializes some of the channels, better set batchmode
2955         * even before first register buffer */
2956        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2957            it != mStreamInfo.end(); it++) {
2958            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2959            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
2960                    && mBatchSize) {
2961                rc = channel->setBatchSize(mBatchSize);
2962                //Disable per frame map unmap for HFR/batchmode case
2963                rc |= channel->setPerFrameMapUnmap(false);
2964                if (NO_ERROR != rc) {
2965                    ALOGE("%s : Channel init failed %d", __func__, rc);
2966                    pthread_mutex_unlock(&mMutex);
2967                    return rc;
2968                }
2969            }
2970        }
2971
2972        for (size_t i = 0; i < request->num_output_buffers; i++) {
2973            const camera3_stream_buffer_t& output = request->output_buffers[i];
2974            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
2975            /*for livesnapshot stream is_type will be DIS*/
2976            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
2977               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
2978               setEis)
2979                rc = channel->registerBuffer(output.buffer, is_type);
2980            else
2981                rc = channel->registerBuffer(output.buffer, IS_TYPE_NONE);
2982
2983            if (rc < 0) {
2984                ALOGE("%s: registerBuffer failed",
2985                        __func__);
2986                pthread_mutex_unlock(&mMutex);
2987                return -ENODEV;
2988            }
2989        }
2990
2991        //First initialize all streams
2992        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
2993            it != mStreamInfo.end(); it++) {
2994            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
2995            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
2996               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
2997               setEis)
2998                rc = channel->initialize(is_type);
2999            else {
3000                rc = channel->initialize(IS_TYPE_NONE);
3001            }
3002            if (NO_ERROR != rc) {
3003                ALOGE("%s : Channel initialization failed %d", __func__, rc);
3004                pthread_mutex_unlock(&mMutex);
3005                return rc;
3006            }
3007        }
3008
3009        if (mRawDumpChannel) {
3010            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3011            if (rc != NO_ERROR) {
3012                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3013                pthread_mutex_unlock(&mMutex);
3014                return rc;
3015            }
3016        }
3017        if (mSupportChannel) {
3018            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3019            if (rc < 0) {
3020                ALOGE("%s: Support channel initialization failed", __func__);
3021                pthread_mutex_unlock(&mMutex);
3022                return rc;
3023            }
3024        }
3025        if (mAnalysisChannel) {
3026            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3027            if (rc < 0) {
3028                ALOGE("%s: Analysis channel initialization failed", __func__);
3029                pthread_mutex_unlock(&mMutex);
3030                return rc;
3031            }
3032        }
3033        if (mDummyBatchChannel) {
3034            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3035            if (rc < 0) {
3036                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3037                pthread_mutex_unlock(&mMutex);
3038                return rc;
3039            }
3040            rc = mDummyBatchChannel->initialize(is_type);
3041            if (rc < 0) {
3042                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3043                pthread_mutex_unlock(&mMutex);
3044                return rc;
3045            }
3046        }
3047
3048        //Then start them.
3049        CDBG_HIGH("%s: Start META Channel", __func__);
3050        rc = mMetadataChannel->start();
3051        if (rc < 0) {
3052            ALOGE("%s: META channel start failed", __func__);
3053            pthread_mutex_unlock(&mMutex);
3054            return rc;
3055        }
3056
3057        if (mAnalysisChannel) {
3058            rc = mAnalysisChannel->start();
3059            if (rc < 0) {
3060                ALOGE("%s: Analysis channel start failed", __func__);
3061                mMetadataChannel->stop();
3062                pthread_mutex_unlock(&mMutex);
3063                return rc;
3064            }
3065        }
3066
3067        if (mSupportChannel) {
3068            rc = mSupportChannel->start();
3069            if (rc < 0) {
3070                ALOGE("%s: Support channel start failed", __func__);
3071                mMetadataChannel->stop();
3072                /* Although support and analysis are mutually exclusive today
3073                   adding it in anycase for future proofing */
3074                if (mAnalysisChannel) {
3075                    mAnalysisChannel->stop();
3076                }
3077                pthread_mutex_unlock(&mMutex);
3078                return rc;
3079            }
3080        }
3081        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3082            it != mStreamInfo.end(); it++) {
3083            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3084            CDBG_HIGH("%s: Start Processing Channel mask=%d",
3085                    __func__, channel->getStreamTypeMask());
3086            rc = channel->start();
3087            if (rc < 0) {
3088                ALOGE("%s: channel start failed", __func__);
3089                pthread_mutex_unlock(&mMutex);
3090                return rc;
3091            }
3092        }
3093
3094        if (mRawDumpChannel) {
3095            CDBG("%s: Starting raw dump stream",__func__);
3096            rc = mRawDumpChannel->start();
3097            if (rc != NO_ERROR) {
3098                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3099                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3100                      it != mStreamInfo.end(); it++) {
3101                    QCamera3Channel *channel =
3102                        (QCamera3Channel *)(*it)->stream->priv;
3103                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3104                        channel->getStreamTypeMask());
3105                    channel->stop();
3106                }
3107                if (mSupportChannel)
3108                    mSupportChannel->stop();
3109                if (mAnalysisChannel) {
3110                    mAnalysisChannel->stop();
3111                }
3112                mMetadataChannel->stop();
3113                pthread_mutex_unlock(&mMutex);
3114                return rc;
3115            }
3116        }
3117        mWokenUpByDaemon = false;
3118        mPendingLiveRequest = 0;
3119        mFirstConfiguration = false;
3120    }
3121
3122    uint32_t frameNumber = request->frame_number;
3123    cam_stream_ID_t streamID;
3124
3125    if (meta.exists(ANDROID_REQUEST_ID)) {
3126        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3127        mCurrentRequestId = request_id;
3128        CDBG("%s: Received request with id: %d",__func__, request_id);
3129    } else if (mFirstRequest || mCurrentRequestId == -1){
3130        ALOGE("%s: Unable to find request id field, \
3131                & no previous id available", __func__);
3132        pthread_mutex_unlock(&mMutex);
3133        return NAME_NOT_FOUND;
3134    } else {
3135        CDBG("%s: Re-using old request id", __func__);
3136        request_id = mCurrentRequestId;
3137    }
3138
3139    CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3140                                    __func__, __LINE__,
3141                                    request->num_output_buffers,
3142                                    request->input_buffer,
3143                                    frameNumber);
3144    // Acquire all request buffers first
3145    streamID.num_streams = 0;
3146    int blob_request = 0;
3147    uint32_t snapshotStreamId = 0;
3148    for (size_t i = 0; i < request->num_output_buffers; i++) {
3149        const camera3_stream_buffer_t& output = request->output_buffers[i];
3150        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3151
3152        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3153            //Call function to store local copy of jpeg data for encode params.
3154            blob_request = 1;
3155            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3156        }
3157
3158        if (output.acquire_fence != -1) {
3159           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3160           close(output.acquire_fence);
3161           if (rc != OK) {
3162              ALOGE("%s: sync wait failed %d", __func__, rc);
3163              pthread_mutex_unlock(&mMutex);
3164              return rc;
3165           }
3166        }
3167
3168        streamID.streamID[streamID.num_streams] =
3169            channel->getStreamID(channel->getStreamTypeMask());
3170        streamID.num_streams++;
3171
3172        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3173            isVidBufRequested = true;
3174        }
3175    }
3176
3177    if (blob_request && mRawDumpChannel) {
3178        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3179        streamID.streamID[streamID.num_streams] =
3180            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3181        streamID.num_streams++;
3182    }
3183
3184    if(request->input_buffer == NULL) {
3185        /* Parse the settings:
3186         * - For every request in NORMAL MODE
3187         * - For every request in HFR mode during preview only case
3188         * - For first request of every batch in HFR mode during video
3189         * recording. In batchmode the same settings except frame number is
3190         * repeated in each request of the batch.
3191         */
3192        if (!mBatchSize ||
3193           (mBatchSize && !isVidBufRequested) ||
3194           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3195            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3196            if (rc < 0) {
3197                ALOGE("%s: fail to set frame parameters", __func__);
3198                pthread_mutex_unlock(&mMutex);
3199                return rc;
3200            }
3201        }
3202        /* For batchMode HFR, setFrameParameters is not called for every
3203         * request. But only frame number of the latest request is parsed */
3204        if (mBatchSize && ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3205                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3206            ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3207            return BAD_VALUE;
3208        }
3209        if (mNeedSensorRestart) {
3210            /* Unlock the mutex as restartSensor waits on the channels to be
3211             * stopped, which in turn calls stream callback functions -
3212             * handleBufferWithLock and handleMetadataWithLock */
3213            pthread_mutex_unlock(&mMutex);
3214            rc = dynamicUpdateMetaStreamInfo();
3215            if (rc != NO_ERROR) {
3216                ALOGE("%s: Restarting the sensor failed", __func__);
3217                return BAD_VALUE;
3218            }
3219            mNeedSensorRestart = false;
3220            pthread_mutex_lock(&mMutex);
3221        }
3222    } else {
3223
3224        if (request->input_buffer->acquire_fence != -1) {
3225           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3226           close(request->input_buffer->acquire_fence);
3227           if (rc != OK) {
3228              ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3229              pthread_mutex_unlock(&mMutex);
3230              return rc;
3231           }
3232        }
3233    }
3234
3235    /* Update pending request list and pending buffers map */
3236    PendingRequestInfo pendingRequest;
3237    pendingRequestIterator latestRequest;
3238    pendingRequest.frame_number = frameNumber;
3239    pendingRequest.num_buffers = request->num_output_buffers;
3240    pendingRequest.request_id = request_id;
3241    pendingRequest.blob_request = blob_request;
3242    pendingRequest.timestamp = 0;
3243    pendingRequest.bUrgentReceived = 0;
3244    if (request->input_buffer) {
3245        pendingRequest.input_buffer =
3246                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3247        *(pendingRequest.input_buffer) = *(request->input_buffer);
3248        pInputBuffer = pendingRequest.input_buffer;
3249    } else {
3250       pendingRequest.input_buffer = NULL;
3251       pInputBuffer = NULL;
3252    }
3253    CameraMetadata input_settings;
3254    input_settings = request->settings;
3255    pendingRequest.settings = input_settings.release();
3256    pendingRequest.pipeline_depth = 0;
3257    pendingRequest.partial_result_cnt = 0;
3258    extractJpegMetadata(mCurJpegMeta, request);
3259    pendingRequest.jpegMetadata = mCurJpegMeta;
3260
3261    //extract capture intent
3262    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3263        mCaptureIntent =
3264                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3265    }
3266    pendingRequest.capture_intent = mCaptureIntent;
3267
3268    for (size_t i = 0; i < request->num_output_buffers; i++) {
3269        RequestedBufferInfo requestedBuf;
3270        memset(&requestedBuf, 0, sizeof(requestedBuf));
3271        requestedBuf.stream = request->output_buffers[i].stream;
3272        requestedBuf.buffer = NULL;
3273        pendingRequest.buffers.push_back(requestedBuf);
3274
3275        // Add to buffer handle the pending buffers list
3276        PendingBufferInfo bufferInfo;
3277        bufferInfo.frame_number = frameNumber;
3278        bufferInfo.buffer = request->output_buffers[i].buffer;
3279        bufferInfo.stream = request->output_buffers[i].stream;
3280        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3281        mPendingBuffersMap.num_buffers++;
3282        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3283        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3284                __func__, frameNumber, bufferInfo.buffer,
3285                channel->getStreamTypeMask(), bufferInfo.stream->format);
3286    }
3287    latestRequest = mPendingRequestsList.insert(
3288            mPendingRequestsList.end(), pendingRequest);
3289    if(mFlush) {
3290        pthread_mutex_unlock(&mMutex);
3291        return NO_ERROR;
3292    }
3293
3294    // Notify metadata channel we receive a request
3295    mMetadataChannel->request(NULL, frameNumber);
3296
3297    if(request->input_buffer != NULL){
3298        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3299        if (NO_ERROR != rc) {
3300            ALOGE("%s: fail to set reproc parameters", __func__);
3301            pthread_mutex_unlock(&mMutex);
3302            return rc;
3303        }
3304    }
3305
3306    // Call request on other streams
3307    uint32_t streams_need_metadata = 0;
3308    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3309    for (size_t i = 0; i < request->num_output_buffers; i++) {
3310        const camera3_stream_buffer_t& output = request->output_buffers[i];
3311        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3312
3313        if (channel == NULL) {
3314            ALOGE("%s: invalid channel pointer for stream", __func__);
3315            continue;
3316        }
3317
3318        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3319            if(request->input_buffer != NULL){
3320                rc = channel->request(output.buffer, frameNumber,
3321                        pInputBuffer, &mReprocMeta);
3322                if (rc < 0) {
3323                    ALOGE("%s: Fail to request on picture channel", __func__);
3324                    pthread_mutex_unlock(&mMutex);
3325                    return rc;
3326                }
3327            } else {
3328                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3329                        __LINE__, output.buffer, frameNumber);
3330                if (!request->settings) {
3331                    rc = channel->request(output.buffer, frameNumber,
3332                            NULL, mPrevParameters);
3333                } else {
3334                    rc = channel->request(output.buffer, frameNumber,
3335                            NULL, mParameters);
3336                }
3337                if (rc < 0) {
3338                    ALOGE("%s: Fail to request on picture channel", __func__);
3339                    pthread_mutex_unlock(&mMutex);
3340                    return rc;
3341                }
3342                pendingBufferIter->need_metadata = true;
3343                streams_need_metadata++;
3344            }
3345        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3346            bool needMetadata = false;
3347            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3348            rc = yuvChannel->request(output.buffer, frameNumber,
3349                    pInputBuffer,
3350                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3351            if (rc < 0) {
3352                ALOGE("%s: Fail to request on YUV channel", __func__);
3353                pthread_mutex_unlock(&mMutex);
3354                return rc;
3355            }
3356            pendingBufferIter->need_metadata = needMetadata;
3357            if (needMetadata)
3358                streams_need_metadata += 1;
3359            CDBG("%s: calling YUV channel request, need_metadata is %d",
3360                    __func__, needMetadata);
3361        } else {
3362            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3363                __LINE__, output.buffer, frameNumber);
3364            rc = channel->request(output.buffer, frameNumber);
3365            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3366                    && mBatchSize) {
3367                mToBeQueuedVidBufs++;
3368                if (mToBeQueuedVidBufs == mBatchSize) {
3369                    channel->queueBatchBuf();
3370                }
3371            }
3372            if (rc < 0) {
3373                ALOGE("%s: request failed", __func__);
3374                pthread_mutex_unlock(&mMutex);
3375                return rc;
3376            }
3377        }
3378        pendingBufferIter++;
3379    }
3380
3381    //If 2 streams have need_metadata set to true, fail the request, unless
3382    //we copy/reference count the metadata buffer
3383    if (streams_need_metadata > 1) {
3384        ALOGE("s: not supporting request in which two streams requires"
3385                " 2 HAL metadata for reprocessing", __func__);
3386        pthread_mutex_unlock(&mMutex);
3387        return -EINVAL;
3388    }
3389
3390    if(request->input_buffer == NULL) {
3391        /* Set the parameters to backend:
3392         * - For every request in NORMAL MODE
3393         * - For every request in HFR mode during preview only case
3394         * - Once every batch in HFR mode during video recording
3395         */
3396        if (!mBatchSize ||
3397           (mBatchSize && !isVidBufRequested) ||
3398           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3399            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3400                    __func__, mBatchSize, isVidBufRequested,
3401                    mToBeQueuedVidBufs);
3402            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3403                    mParameters);
3404            if (rc < 0) {
3405                ALOGE("%s: set_parms failed", __func__);
3406            }
3407            /* reset to zero coz, the batch is queued */
3408            mToBeQueuedVidBufs = 0;
3409        }
3410        mPendingLiveRequest++;
3411    }
3412
3413    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3414
3415    mFirstRequest = false;
3416    // Added a timed condition wait
3417    struct timespec ts;
3418    uint8_t isValidTimeout = 1;
3419    rc = clock_gettime(CLOCK_REALTIME, &ts);
3420    if (rc < 0) {
3421      isValidTimeout = 0;
3422      ALOGE("%s: Error reading the real time clock!!", __func__);
3423    }
3424    else {
3425      // Make timeout as 5 sec for request to be honored
3426      ts.tv_sec += 5;
3427    }
3428    //Block on conditional variable
3429    if (mBatchSize) {
3430        /* For HFR, more buffers are dequeued upfront to improve the performance */
3431        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3432        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3433    }
3434    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3435        if (!isValidTimeout) {
3436            CDBG("%s: Blocking on conditional wait", __func__);
3437            pthread_cond_wait(&mRequestCond, &mMutex);
3438        }
3439        else {
3440            CDBG("%s: Blocking on timed conditional wait", __func__);
3441            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3442            if (rc == ETIMEDOUT) {
3443                rc = -ENODEV;
3444                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3445                break;
3446            }
3447        }
3448        CDBG("%s: Unblocked", __func__);
3449        if (mWokenUpByDaemon) {
3450            mWokenUpByDaemon = false;
3451            if (mPendingLiveRequest < maxInFlightRequests)
3452                break;
3453        }
3454    }
3455    pthread_mutex_unlock(&mMutex);
3456
3457    return rc;
3458}
3459
3460/*===========================================================================
3461 * FUNCTION   : dump
3462 *
3463 * DESCRIPTION:
3464 *
3465 * PARAMETERS :
3466 *
3467 *
3468 * RETURN     :
3469 *==========================================================================*/
3470void QCamera3HardwareInterface::dump(int fd)
3471{
3472    pthread_mutex_lock(&mMutex);
3473    dprintf(fd, "\n Camera HAL3 information Begin \n");
3474
3475    dprintf(fd, "\nNumber of pending requests: %zu \n",
3476        mPendingRequestsList.size());
3477    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3478    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3479    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3480    for(pendingRequestIterator i = mPendingRequestsList.begin();
3481            i != mPendingRequestsList.end(); i++) {
3482        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3483        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3484        i->input_buffer);
3485    }
3486    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3487                mPendingBuffersMap.num_buffers);
3488    dprintf(fd, "-------+------------------\n");
3489    dprintf(fd, " Frame | Stream type mask \n");
3490    dprintf(fd, "-------+------------------\n");
3491    for(List<PendingBufferInfo>::iterator i =
3492        mPendingBuffersMap.mPendingBufferList.begin();
3493        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3494        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3495        dprintf(fd, " %5d | %11d \n",
3496                i->frame_number, channel->getStreamTypeMask());
3497    }
3498    dprintf(fd, "-------+------------------\n");
3499
3500    dprintf(fd, "\nPending frame drop list: %zu\n",
3501        mPendingFrameDropList.size());
3502    dprintf(fd, "-------+-----------\n");
3503    dprintf(fd, " Frame | Stream ID \n");
3504    dprintf(fd, "-------+-----------\n");
3505    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3506        i != mPendingFrameDropList.end(); i++) {
3507        dprintf(fd, " %5d | %9d \n",
3508            i->frame_number, i->stream_ID);
3509    }
3510    dprintf(fd, "-------+-----------\n");
3511
3512    dprintf(fd, "\n Camera HAL3 information End \n");
3513
3514    /* use dumpsys media.camera as trigger to send update debug level event */
3515    mUpdateDebugLevel = true;
3516    pthread_mutex_unlock(&mMutex);
3517    return;
3518}
3519
/*===========================================================================
 * FUNCTION   : flush
 *
 * DESCRIPTION: Flush all pending and in-flight capture requests: stop all
 *              channels, return errors for every outstanding request, then
 *              restart the channels so new requests can be serviced.
 *
 * PARAMETERS : none
 *
 * RETURN     : 0 on success, negative error code on failure
 *==========================================================================*/
int QCamera3HardwareInterface::flush()
{
    ATRACE_CALL();
    int32_t rc = NO_ERROR;

    CDBG("%s: Unblocking Process Capture Request", __func__);
    // Raise the flush flag under the lock so any thread blocked inside
    // process_capture_request can observe it and stop waiting.
    pthread_mutex_lock(&mMutex);
    mFlush = true;
    pthread_mutex_unlock(&mMutex);

    // Stop channels with mMutex released; channel stop can block on the
    // camera backend and must not be done while holding the HAL lock.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        // NOTE(review): mFlush is left set on this error path -- confirm the
        // framework treats a failed flush as fatal for this device.
        return rc;
    }

    // Mutex Lock
    pthread_mutex_lock(&mMutex);

    // Unblock process_capture_request
    mPendingLiveRequest = 0;
    pthread_cond_signal(&mRequestCond);

    // Send error notifications/results for every request still pending so
    // the framework gets all of its buffers back.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    // Flush complete; allow process_capture_request to run normally again.
    mFlush = false;

    // Start the Streams/Channels
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);

    return 0;
}
3574
3575/*===========================================================================
3576 * FUNCTION   : captureResultCb
3577 *
3578 * DESCRIPTION: Callback handler for all capture result
3579 *              (streams, as well as metadata)
3580 *
3581 * PARAMETERS :
3582 *   @metadata : metadata information
3583 *   @buffer   : actual gralloc buffer to be returned to frameworks.
3584 *               NULL if metadata.
3585 *
3586 * RETURN     : NONE
3587 *==========================================================================*/
3588void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3589                camera3_stream_buffer_t *buffer, uint32_t frame_number)
3590{
3591    if (metadata_buf) {
3592        if (mBatchSize) {
3593            handleBatchMetadata(metadata_buf,
3594                    true /* free_and_bufdone_meta_buf */);
3595        } else { /* mBatchSize = 0 */
3596            pthread_mutex_lock(&mMutex);
3597            handleMetadataWithLock(metadata_buf,
3598                    true /* free_and_bufdone_meta_buf */);
3599            pthread_mutex_unlock(&mMutex);
3600        }
3601    } else {
3602        pthread_mutex_lock(&mMutex);
3603        handleBufferWithLock(buffer, frame_number);
3604        pthread_mutex_unlock(&mMutex);
3605    }
3606    return;
3607}
3608
3609/*===========================================================================
3610 * FUNCTION   : getReprocessibleOutputStreamId
3611 *
3612 * DESCRIPTION: Get source output stream id for the input reprocess stream
3613 *              based on size and format, which would be the largest
3614 *              output stream if an input stream exists.
3615 *
3616 * PARAMETERS :
3617 *   @id      : return the stream id if found
3618 *
3619 * RETURN     : int32_t type of status
3620 *              NO_ERROR  -- success
3621 *              none-zero failure code
3622 *==========================================================================*/
3623int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
3624{
3625    stream_info_t* stream = NULL;
3626
3627    /* check if any output or bidirectional stream with the same size and format
3628       and return that stream */
3629    if ((mInputStreamInfo.dim.width > 0) &&
3630            (mInputStreamInfo.dim.height > 0)) {
3631        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3632                it != mStreamInfo.end(); it++) {
3633
3634            camera3_stream_t *stream = (*it)->stream;
3635            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
3636                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
3637                    (stream->format == mInputStreamInfo.format)) {
3638                // Usage flag for an input stream and the source output stream
3639                // may be different.
3640                CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
3641                CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
3642                        __func__, stream->usage, mInputStreamInfo.usage);
3643
3644                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
3645                if (channel != NULL && channel->mStreams[0]) {
3646                    id = channel->mStreams[0]->getMyServerID();
3647                    return NO_ERROR;
3648                }
3649            }
3650        }
3651    } else {
3652        CDBG("%s: No input stream, so no reprocessible output stream", __func__);
3653    }
3654    return NAME_NOT_FOUND;
3655}
3656
3657/*===========================================================================
3658 * FUNCTION   : lookupFwkName
3659 *
3660 * DESCRIPTION: In case the enum is not same in fwk and backend
3661 *              make sure the parameter is correctly propogated
3662 *
3663 * PARAMETERS  :
3664 *   @arr      : map between the two enums
3665 *   @len      : len of the map
3666 *   @hal_name : name of the hal_parm to map
3667 *
3668 * RETURN     : int type of status
3669 *              fwk_name  -- success
3670 *              none-zero failure code
3671 *==========================================================================*/
3672template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3673        size_t len, halType hal_name)
3674{
3675
3676    for (size_t i = 0; i < len; i++) {
3677        if (arr[i].hal_name == hal_name) {
3678            return arr[i].fwk_name;
3679        }
3680    }
3681
3682    /* Not able to find matching framework type is not necessarily
3683     * an error case. This happens when mm-camera supports more attributes
3684     * than the frameworks do */
3685    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3686    return NAME_NOT_FOUND;
3687}
3688
3689/*===========================================================================
3690 * FUNCTION   : lookupHalName
3691 *
3692 * DESCRIPTION: In case the enum is not same in fwk and backend
3693 *              make sure the parameter is correctly propogated
3694 *
3695 * PARAMETERS  :
3696 *   @arr      : map between the two enums
3697 *   @len      : len of the map
3698 *   @fwk_name : name of the hal_parm to map
3699 *
3700 * RETURN     : int32_t type of status
3701 *              hal_name  -- success
3702 *              none-zero failure code
3703 *==========================================================================*/
3704template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3705        size_t len, fwkType fwk_name)
3706{
3707    for (size_t i = 0; i < len; i++) {
3708        if (arr[i].fwk_name == fwk_name) {
3709            return arr[i].hal_name;
3710        }
3711    }
3712
3713    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3714    return NAME_NOT_FOUND;
3715}
3716
3717/*===========================================================================
3718 * FUNCTION   : lookupProp
3719 *
3720 * DESCRIPTION: lookup a value by its name
3721 *
3722 * PARAMETERS :
3723 *   @arr     : map between the two enums
3724 *   @len     : size of the map
3725 *   @name    : name to be looked up
3726 *
3727 * RETURN     : Value if found
3728 *              CAM_CDS_MODE_MAX if not found
3729 *==========================================================================*/
3730template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
3731        size_t len, const char *name)
3732{
3733    if (name) {
3734        for (size_t i = 0; i < len; i++) {
3735            if (!strcmp(arr[i].desc, name)) {
3736                return arr[i].val;
3737            }
3738        }
3739    }
3740    return CAM_CDS_MODE_MAX;
3741}
3742
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata received from the camera backend into a
 *              framework-visible camera_metadata_t result.
3746 *
3747 * PARAMETERS :
3748 *   @metadata : metadata information from callback
3749 *   @timestamp: metadata buffer timestamp
3750 *   @request_id: request id
3751 *   @jpegMetadata: additional jpeg metadata
3752 *   @pprocDone: whether internal offline postprocsesing is done
3753 *
3754 * RETURN     : camera_metadata_t*
3755 *              metadata in a format specified by fwk
3756 *==========================================================================*/
3757camera_metadata_t*
3758QCamera3HardwareInterface::translateFromHalMetadata(
3759                                 metadata_buffer_t *metadata,
3760                                 nsecs_t timestamp,
3761                                 int32_t request_id,
3762                                 const CameraMetadata& jpegMetadata,
3763                                 uint8_t pipeline_depth,
3764                                 uint8_t capture_intent,
3765                                 bool pprocDone)
3766{
3767    CameraMetadata camMetadata;
3768    camera_metadata_t *resultMetadata;
3769
3770    if (jpegMetadata.entryCount())
3771        camMetadata.append(jpegMetadata);
3772
3773    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
3774    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
3775    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
3776    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
3777
3778    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
3779        int64_t fwk_frame_number = *frame_number;
3780        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
3781    }
3782
3783    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
3784        int32_t fps_range[2];
3785        fps_range[0] = (int32_t)float_range->min_fps;
3786        fps_range[1] = (int32_t)float_range->max_fps;
3787        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
3788                                      fps_range, 2);
3789        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
3790            __func__, fps_range[0], fps_range[1]);
3791    }
3792
3793    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
3794        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
3795    }
3796
3797    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
3798        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
3799                METADATA_MAP_SIZE(SCENE_MODES_MAP),
3800                *sceneMode);
3801        if (NAME_NOT_FOUND != val) {
3802            uint8_t fwkSceneMode = (uint8_t)val;
3803            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
3804            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
3805                    __func__, fwkSceneMode);
3806        }
3807    }
3808
3809    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
3810        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
3811        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
3812    }
3813
3814    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
3815        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
3816        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
3817    }
3818
3819    IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
3820            CAM_INTF_META_FACE_DETECTION, metadata) {
3821        uint8_t numFaces = MIN(faceDetectionInfo->num_faces_detected, MAX_ROI);
3822        int32_t faceIds[MAX_ROI];
3823        uint8_t faceScores[MAX_ROI];
3824        int32_t faceRectangles[MAX_ROI * 4];
3825        int32_t faceLandmarks[MAX_ROI * 6];
3826        size_t j = 0, k = 0;
3827
3828        for (size_t i = 0; i < numFaces; i++) {
3829            faceIds[i] = faceDetectionInfo->faces[i].face_id;
3830            faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
3831            // Adjust crop region from sensor output coordinate system to active
3832            // array coordinate system.
3833            cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
3834            mCropRegionMapper.toActiveArray(rect.left, rect.top,
3835                    rect.width, rect.height);
3836
3837            convertToRegions(faceDetectionInfo->faces[i].face_boundary,
3838                faceRectangles+j, -1);
3839
3840            // Map the co-ordinate sensor output coordinate system to active
3841            // array coordinate system.
3842            cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
3843            mCropRegionMapper.toActiveArray(face.left_eye_center.x,
3844                    face.left_eye_center.y);
3845            mCropRegionMapper.toActiveArray(face.right_eye_center.x,
3846                    face.right_eye_center.y);
3847            mCropRegionMapper.toActiveArray(face.mouth_center.x,
3848                    face.mouth_center.y);
3849
3850            convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
3851            j+= 4;
3852            k+= 6;
3853        }
3854        if (numFaces <= 0) {
3855            memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
3856            memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
3857            memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
3858            memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
3859        }
3860        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
3861        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
3862        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, faceRectangles, numFaces * 4U);
3863        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, faceLandmarks, numFaces * 6U);
3864    }
3865
3866    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
3867        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
3868        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
3869    }
3870
3871    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
3872            CAM_INTF_META_EDGE_MODE, metadata) {
3873        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
3874        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
3875        camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
3876    }
3877
3878    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
3879        uint8_t fwk_flashPower = (uint8_t) *flashPower;
3880        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
3881    }
3882
3883    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
3884        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
3885    }
3886
3887    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
3888        if (0 <= *flashState) {
3889            uint8_t fwk_flashState = (uint8_t) *flashState;
3890            if (!gCamCapability[mCameraId]->flash_available) {
3891                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
3892            }
3893            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
3894        }
3895    }
3896
3897    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
3898        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
3899        if (NAME_NOT_FOUND != val) {
3900            uint8_t fwk_flashMode = (uint8_t)val;
3901            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
3902        }
3903    }
3904
3905    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
3906        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
3907        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
3908    }
3909
3910    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
3911        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
3912    }
3913
3914    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
3915        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
3916    }
3917
3918    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
3919        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
3920    }
3921
3922    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
3923        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
3924        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
3925    }
3926
3927    /*EIS is currently not hooked up to the app, so set the mode to OFF*/
3928    uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3929    camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
3930
3931    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
3932        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
3933        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
3934    }
3935
3936    IF_META_AVAILABLE(uint32_t, noiseRedStrength, CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata) {
3937        uint8_t fwk_noiseRedStrength = (uint8_t) *noiseRedStrength;
3938        camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, &fwk_noiseRedStrength, 1);
3939    }
3940
3941    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
3942        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
3943    }
3944
3945    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
3946        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
3947
3948        CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
3949          blackLevelSourcePattern->cam_black_level[0],
3950          blackLevelSourcePattern->cam_black_level[1],
3951          blackLevelSourcePattern->cam_black_level[2],
3952          blackLevelSourcePattern->cam_black_level[3]);
3953    }
3954
3955    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
3956        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
3957        float fwk_blackLevelInd[4];
3958
3959        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
3960        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
3961        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
3962        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
3963
3964        CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
3965          blackLevelAppliedPattern->cam_black_level[0],
3966          blackLevelAppliedPattern->cam_black_level[1],
3967          blackLevelAppliedPattern->cam_black_level[2],
3968          blackLevelAppliedPattern->cam_black_level[3]);
3969        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
3970    }
3971
3972    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
3973            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
3974        int32_t scalerCropRegion[4];
3975        scalerCropRegion[0] = hScalerCropRegion->left;
3976        scalerCropRegion[1] = hScalerCropRegion->top;
3977        scalerCropRegion[2] = hScalerCropRegion->width;
3978        scalerCropRegion[3] = hScalerCropRegion->height;
3979
3980        // Adjust crop region from sensor output coordinate system to active
3981        // array coordinate system.
3982        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
3983                scalerCropRegion[2], scalerCropRegion[3]);
3984
3985        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
3986    }
3987
3988    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
3989        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
3990        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
3991    }
3992
3993    IF_META_AVAILABLE(int64_t, sensorFameDuration,
3994            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
3995        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
3996        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
3997    }
3998
3999    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4000            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4001        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4002        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4003                sensorRollingShutterSkew, 1);
4004    }
4005
4006    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4007        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4008        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4009
4010        //calculate the noise profile based on sensitivity
4011        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4012        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4013        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4014        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4015            noise_profile[i]   = noise_profile_S;
4016            noise_profile[i+1] = noise_profile_O;
4017        }
4018        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4019                noise_profile_S, noise_profile_O);
4020        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4021                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4022    }
4023
4024    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4025        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4026        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4027    }
4028
4029    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4030        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4031                *faceDetectMode);
4032        if (NAME_NOT_FOUND != val) {
4033            uint8_t fwk_faceDetectMode = (uint8_t)val;
4034            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4035        }
4036    }
4037
4038    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4039        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4040        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4041    }
4042
4043    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4044            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4045        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4046        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4047    }
4048
4049    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4050            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4051        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4052                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4053    }
4054
4055    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4056            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4057        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4058                CAM_MAX_SHADING_MAP_HEIGHT);
4059        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4060                CAM_MAX_SHADING_MAP_WIDTH);
4061        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4062                lensShadingMap->lens_shading, 4U * map_width * map_height);
4063    }
4064
4065    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4066        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4067        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4068    }
4069
4070    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4071        //Populate CAM_INTF_META_TONEMAP_CURVES
4072        /* ch0 = G, ch 1 = B, ch 2 = R*/
4073        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4074            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4075                    __func__, tonemap->tonemap_points_cnt,
4076                    CAM_MAX_TONEMAP_CURVE_SIZE);
4077            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4078        }
4079
4080        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4081                        &tonemap->curves[0].tonemap_points[0][0],
4082                        tonemap->tonemap_points_cnt * 2);
4083
4084        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4085                        &tonemap->curves[1].tonemap_points[0][0],
4086                        tonemap->tonemap_points_cnt * 2);
4087
4088        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4089                        &tonemap->curves[2].tonemap_points[0][0],
4090                        tonemap->tonemap_points_cnt * 2);
4091    }
4092
4093    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4094            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4095        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4096                CC_GAINS_COUNT);
4097    }
4098
4099    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4100            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4101        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4102                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4103                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4104    }
4105
4106    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4107            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4108        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4109            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4110                    __func__, toneCurve->tonemap_points_cnt,
4111                    CAM_MAX_TONEMAP_CURVE_SIZE);
4112            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4113        }
4114        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4115                (float*)toneCurve->curve.tonemap_points,
4116                toneCurve->tonemap_points_cnt * 2);
4117    }
4118
4119    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4120            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4121        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4122                predColorCorrectionGains->gains, 4);
4123    }
4124
4125    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4126            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4127        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4128                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4129                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4130    }
4131
4132    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4133        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4134    }
4135
4136    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4137        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4138        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4139    }
4140
4141    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4142        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4143        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4144    }
4145
4146    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4147        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4148                *effectMode);
4149        if (NAME_NOT_FOUND != val) {
4150            uint8_t fwk_effectMode = (uint8_t)val;
4151            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4152        }
4153    }
4154
4155    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4156            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4157        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4158                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4159        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4160            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4161        }
4162        int32_t fwk_testPatternData[4];
4163        fwk_testPatternData[0] = testPatternData->r;
4164        fwk_testPatternData[3] = testPatternData->b;
4165        switch (gCamCapability[mCameraId]->color_arrangement) {
4166        case CAM_FILTER_ARRANGEMENT_RGGB:
4167        case CAM_FILTER_ARRANGEMENT_GRBG:
4168            fwk_testPatternData[1] = testPatternData->gr;
4169            fwk_testPatternData[2] = testPatternData->gb;
4170            break;
4171        case CAM_FILTER_ARRANGEMENT_GBRG:
4172        case CAM_FILTER_ARRANGEMENT_BGGR:
4173            fwk_testPatternData[2] = testPatternData->gr;
4174            fwk_testPatternData[1] = testPatternData->gb;
4175            break;
4176        default:
4177            ALOGE("%s: color arrangement %d is not supported", __func__,
4178                gCamCapability[mCameraId]->color_arrangement);
4179            break;
4180        }
4181        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4182    }
4183
4184    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4185        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4186    }
4187
4188    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4189        String8 str((const char *)gps_methods);
4190        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4191    }
4192
4193    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4194        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4195    }
4196
4197    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4198        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4199    }
4200
4201    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4202        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4203        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4204    }
4205
4206    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4207        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4208        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4209    }
4210
4211    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4212        int32_t fwk_thumb_size[2];
4213        fwk_thumb_size[0] = thumb_size->width;
4214        fwk_thumb_size[1] = thumb_size->height;
4215        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4216    }
4217
4218    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4219        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4220                privateData,
4221                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4222    }
4223
4224    if (metadata->is_tuning_params_valid) {
4225        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4226        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4227        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4228
4229
4230        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4231                sizeof(uint32_t));
4232        data += sizeof(uint32_t);
4233
4234        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4235                sizeof(uint32_t));
4236        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4237        data += sizeof(uint32_t);
4238
4239        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4240                sizeof(uint32_t));
4241        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4242        data += sizeof(uint32_t);
4243
4244        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4245                sizeof(uint32_t));
4246        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4247        data += sizeof(uint32_t);
4248
4249        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4250                sizeof(uint32_t));
4251        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4252        data += sizeof(uint32_t);
4253
4254        metadata->tuning_params.tuning_mod3_data_size = 0;
4255        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4256                sizeof(uint32_t));
4257        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4258        data += sizeof(uint32_t);
4259
4260        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4261                TUNING_SENSOR_DATA_MAX);
4262        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4263                count);
4264        data += count;
4265
4266        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4267                TUNING_VFE_DATA_MAX);
4268        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4269                count);
4270        data += count;
4271
4272        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4273                TUNING_CPP_DATA_MAX);
4274        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4275                count);
4276        data += count;
4277
4278        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4279                TUNING_CAC_DATA_MAX);
4280        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4281                count);
4282        data += count;
4283
4284        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4285                (int32_t *)(void *)tuning_meta_data_blob,
4286                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4287    }
4288
4289    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4290            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4291        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4292                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4293                NEUTRAL_COL_POINTS);
4294    }
4295
4296    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4297        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4298        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4299    }
4300
4301    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4302        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4303        // Adjust crop region from sensor output coordinate system to active
4304        // array coordinate system.
4305        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4306                hAeRegions->rect.width, hAeRegions->rect.height);
4307
4308        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4309        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4310                REGIONS_TUPLE_COUNT);
4311        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4312                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4313                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4314                hAeRegions->rect.height);
4315    }
4316
4317    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4318        /*af regions*/
4319        int32_t afRegions[REGIONS_TUPLE_COUNT];
4320        // Adjust crop region from sensor output coordinate system to active
4321        // array coordinate system.
4322        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4323                hAfRegions->rect.width, hAfRegions->rect.height);
4324
4325        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4326        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4327                REGIONS_TUPLE_COUNT);
4328        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4329                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4330                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4331                hAfRegions->rect.height);
4332    }
4333
4334    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4335        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4336                *hal_ab_mode);
4337        if (NAME_NOT_FOUND != val) {
4338            uint8_t fwk_ab_mode = (uint8_t)val;
4339            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4340        }
4341    }
4342
4343    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4344        int val = lookupFwkName(SCENE_MODES_MAP,
4345                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4346        if (NAME_NOT_FOUND != val) {
4347            uint8_t fwkBestshotMode = (uint8_t)val;
4348            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4349            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4350        } else {
4351            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4352        }
4353    }
4354
4355    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4356         uint8_t fwk_mode = (uint8_t) *mode;
4357         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4358    }
4359
4360    /* Constant metadata values to be update*/
4361    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4362    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4363
4364    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4365    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4366
4367    int32_t hotPixelMap[2];
4368    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4369
4370    // CDS
4371    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4372        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4373    }
4374
4375    // TNR
4376    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4377        uint8_t tnr_enable       = tnr->denoise_enable;
4378        int32_t tnr_process_type = (int32_t)tnr->process_plates;
4379
4380        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4381        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4382    }
4383
4384    // Reprocess crop data
4385    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4386        uint8_t cnt = crop_data->num_of_streams;
4387        if (pprocDone) {
4388            // HAL already does internal reprocessing, either via reprocessing before
4389            // JPEG encoding, or offline postprocessing for pproc bypass case.
4390            CDBG("%s: Internal offline postprocessing was done, no need for further crop", __func__);
4391        } else if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4392            // mm-qcamera-daemon only posts crop_data for streams
4393            // not linked to pproc. So no valid crop metadata is not
4394            // necessarily an error case.
4395            CDBG("%s: No valid crop metadata entries", __func__);
4396        } else {
4397            uint32_t reproc_stream_id;
4398            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4399                CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4400            } else {
4401                int rc = NO_ERROR;
4402                Vector<int32_t> roi_map;
4403                int32_t *crop = new int32_t[cnt*4];
4404                if (NULL == crop) {
4405                   rc = NO_MEMORY;
4406                }
4407                if (NO_ERROR == rc) {
4408                    int32_t streams_found = 0;
4409                    for (size_t i = 0; i < cnt; i++) {
4410                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4411                            crop[0] = crop_data->crop_info[i].crop.left;
4412                            crop[1] = crop_data->crop_info[i].crop.top;
4413                            crop[2] = crop_data->crop_info[i].crop.width;
4414                            crop[3] = crop_data->crop_info[i].crop.height;
4415                            roi_map.add(crop_data->crop_info[i].roi_map.left);
4416                            roi_map.add(crop_data->crop_info[i].roi_map.top);
4417                            roi_map.add(crop_data->crop_info[i].roi_map.width);
4418                            roi_map.add(crop_data->crop_info[i].roi_map.height);
4419                            streams_found++;
4420                            CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4421                                    __func__,
4422                                    crop_data->crop_info[i].crop.left,
4423                                    crop_data->crop_info[i].crop.top,
4424                                    crop_data->crop_info[i].crop.width,
4425                                    crop_data->crop_info[i].crop.height);
4426                            CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4427                                    __func__,
4428                                    crop_data->crop_info[i].roi_map.left,
4429                                    crop_data->crop_info[i].roi_map.top,
4430                                    crop_data->crop_info[i].roi_map.width,
4431                                    crop_data->crop_info[i].roi_map.height);
4432                            break;
4433
4434                       }
4435                    }
4436                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4437                            &streams_found, 1);
4438                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
4439                            crop, (size_t)(streams_found * 4));
4440                    if (roi_map.array()) {
4441                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4442                                roi_map.array(), roi_map.size());
4443                    }
4444               }
4445               if (crop) {
4446                   delete [] crop;
4447               }
4448            }
4449        }
4450    }
4451
4452    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4453        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4454                *cacMode);
4455        if (NAME_NOT_FOUND != val) {
4456            uint8_t fwkCacMode = (uint8_t)val;
4457            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4458        } else {
4459            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4460        }
4461    }
4462
4463    // Post blob of cam_cds_data through vendor tag.
4464    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4465        uint8_t cnt = cdsInfo->num_of_streams;
4466        cam_cds_data_t cdsDataOverride;
4467        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4468        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4469        cdsDataOverride.num_of_streams = 1;
4470        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4471            uint32_t reproc_stream_id;
4472            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4473                CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4474            } else {
4475                for (size_t i = 0; i < cnt; i++) {
4476                    if (cdsInfo->cds_info[i].stream_id ==
4477                            reproc_stream_id) {
4478                        cdsDataOverride.cds_info[0].cds_enable =
4479                                cdsInfo->cds_info[i].cds_enable;
4480                        break;
4481                    }
4482                }
4483            }
4484        } else {
4485            ALOGE("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
4486        }
4487        camMetadata.update(QCAMERA3_CDS_INFO,
4488                (uint8_t *)&cdsDataOverride,
4489                sizeof(cam_cds_data_t));
4490    }
4491
4492    // Ldaf calibration data
4493    if (!mLdafCalibExist) {
4494        IF_META_AVAILABLE(uint32_t, ldafCalib,
4495                CAM_INTF_META_LDAF_EXIF, metadata) {
4496            mLdafCalibExist = true;
4497            mLdafCalib[0] = ldafCalib[0];
4498            mLdafCalib[1] = ldafCalib[1];
4499            CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
4500                    ldafCalib[0], ldafCalib[1]);
4501        }
4502    }
4503
4504    resultMetadata = camMetadata.release();
4505    return resultMetadata;
4506}
4507
4508/*===========================================================================
4509 * FUNCTION   : saveExifParams
4510 *
4511 * DESCRIPTION:
4512 *
4513 * PARAMETERS :
4514 *   @metadata : metadata information from callback
4515 *
4516 * RETURN     : none
4517 *
4518 *==========================================================================*/
4519void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
4520{
4521    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
4522            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
4523        mExifParams.ae_debug_params = *ae_exif_debug_params;
4524        mExifParams.ae_debug_params_valid = TRUE;
4525    }
4526    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
4527            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
4528        mExifParams.awb_debug_params = *awb_exif_debug_params;
4529        mExifParams.awb_debug_params_valid = TRUE;
4530    }
4531    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
4532            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
4533        mExifParams.af_debug_params = *af_exif_debug_params;
4534        mExifParams.af_debug_params_valid = TRUE;
4535    }
4536    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
4537            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
4538        mExifParams.asd_debug_params = *asd_exif_debug_params;
4539        mExifParams.asd_debug_params_valid = TRUE;
4540    }
4541    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
4542            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
4543        mExifParams.stats_debug_params = *stats_exif_debug_params;
4544        mExifParams.stats_debug_params_valid = TRUE;
4545    }
4546}
4547
4548/*===========================================================================
4549 * FUNCTION   : get3AExifParams
4550 *
4551 * DESCRIPTION:
4552 *
4553 * PARAMETERS : none
4554 *
4555 *
4556 * RETURN     : mm_jpeg_exif_params_t
4557 *
4558 *==========================================================================*/
4559mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
4560{
4561    return mExifParams;
4562}
4563
4564/*===========================================================================
4565 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4566 *
4567 * DESCRIPTION:
4568 *
4569 * PARAMETERS :
4570 *   @metadata : metadata information from callback
4571 *
4572 * RETURN     : camera_metadata_t*
4573 *              metadata in a format specified by fwk
4574 *==========================================================================*/
4575camera_metadata_t*
4576QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
4577                                (metadata_buffer_t *metadata)
4578{
4579    CameraMetadata camMetadata;
4580    camera_metadata_t *resultMetadata;
4581
4582    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4583        uint8_t fwk_afState = (uint8_t) *afState;
4584        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4585        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4586    }
4587
4588    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4589        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4590    }
4591
4592    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4593        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4594    }
4595
4596    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
4597        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
4598        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
4599        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
4600    }
4601
4602    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
4603        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
4604                &aecTrigger->trigger, 1);
4605        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
4606                &aecTrigger->trigger_id, 1);
4607        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
4608                __func__, aecTrigger->trigger);
4609        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
4610                aecTrigger->trigger_id);
4611    }
4612
4613    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
4614        uint8_t fwk_ae_state = (uint8_t) *ae_state;
4615        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
4616        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
4617    }
4618
4619    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4620        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4621        if (NAME_NOT_FOUND != val) {
4622            uint8_t fwkAfMode = (uint8_t)val;
4623            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4624            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
4625        } else {
4626            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
4627                    val);
4628        }
4629    }
4630
4631    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
4632        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
4633                &af_trigger->trigger, 1);
4634        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
4635                __func__, af_trigger->trigger);
4636        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
4637        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
4638                af_trigger->trigger_id);
4639    }
4640
4641    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
4642        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4643                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
4644        if (NAME_NOT_FOUND != val) {
4645            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
4646            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
4647            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
4648        } else {
4649            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
4650        }
4651    }
4652
4653    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4654    uint32_t aeMode = CAM_AE_MODE_MAX;
4655    int32_t flashMode = CAM_FLASH_MODE_MAX;
4656    int32_t redeye = -1;
4657    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
4658        aeMode = *pAeMode;
4659    }
4660    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
4661        flashMode = *pFlashMode;
4662    }
4663    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
4664        redeye = *pRedeye;
4665    }
4666
4667    if (1 == redeye) {
4668        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
4669        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4670    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
4671        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
4672                flashMode);
4673        if (NAME_NOT_FOUND != val) {
4674            fwk_aeMode = (uint8_t)val;
4675            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4676        } else {
4677            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
4678        }
4679    } else if (aeMode == CAM_AE_MODE_ON) {
4680        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
4681        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4682    } else if (aeMode == CAM_AE_MODE_OFF) {
4683        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4684        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4685    } else {
4686        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
4687              "flashMode:%d, aeMode:%u!!!",
4688                __func__, redeye, flashMode, aeMode);
4689    }
4690
4691    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4692        uint8_t fwk_lensState = *lensState;
4693        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4694    }
4695
4696    resultMetadata = camMetadata.release();
4697    return resultMetadata;
4698}
4699
4700/*===========================================================================
4701 * FUNCTION   : dumpMetadataToFile
4702 *
4703 * DESCRIPTION: Dumps tuning metadata to file system
4704 *
4705 * PARAMETERS :
4706 *   @meta           : tuning metadata
4707 *   @dumpFrameCount : current dump frame count
4708 *   @enabled        : Enable mask
4709 *
4710 *==========================================================================*/
4711void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
4712                                                   uint32_t &dumpFrameCount,
4713                                                   bool enabled,
4714                                                   const char *type,
4715                                                   uint32_t frameNumber)
4716{
4717    uint32_t frm_num = 0;
4718
4719    //Some sanity checks
4720    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
4721        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
4722              __func__,
4723              meta.tuning_sensor_data_size,
4724              TUNING_SENSOR_DATA_MAX);
4725        return;
4726    }
4727
4728    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
4729        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
4730              __func__,
4731              meta.tuning_vfe_data_size,
4732              TUNING_VFE_DATA_MAX);
4733        return;
4734    }
4735
4736    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
4737        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
4738              __func__,
4739              meta.tuning_cpp_data_size,
4740              TUNING_CPP_DATA_MAX);
4741        return;
4742    }
4743
4744    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
4745        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
4746              __func__,
4747              meta.tuning_cac_data_size,
4748              TUNING_CAC_DATA_MAX);
4749        return;
4750    }
4751    //
4752
4753    if(enabled){
4754        char timeBuf[FILENAME_MAX];
4755        char buf[FILENAME_MAX];
4756        memset(buf, 0, sizeof(buf));
4757        memset(timeBuf, 0, sizeof(timeBuf));
4758        time_t current_time;
4759        struct tm * timeinfo;
4760        time (&current_time);
4761        timeinfo = localtime (&current_time);
4762        if (timeinfo != NULL) {
4763            strftime (timeBuf, sizeof(timeBuf),
4764                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
4765        }
4766        String8 filePath(timeBuf);
4767        snprintf(buf,
4768                sizeof(buf),
4769                "%dm_%s_%d.bin",
4770                dumpFrameCount,
4771                type,
4772                frameNumber);
4773        filePath.append(buf);
4774        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
4775        if (file_fd >= 0) {
4776            ssize_t written_len = 0;
4777            meta.tuning_data_version = TUNING_DATA_VERSION;
4778            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
4779            written_len += write(file_fd, data, sizeof(uint32_t));
4780            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
4781            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4782            written_len += write(file_fd, data, sizeof(uint32_t));
4783            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
4784            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4785            written_len += write(file_fd, data, sizeof(uint32_t));
4786            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
4787            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4788            written_len += write(file_fd, data, sizeof(uint32_t));
4789            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
4790            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4791            written_len += write(file_fd, data, sizeof(uint32_t));
4792            meta.tuning_mod3_data_size = 0;
4793            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
4794            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4795            written_len += write(file_fd, data, sizeof(uint32_t));
4796            size_t total_size = meta.tuning_sensor_data_size;
4797            data = (void *)((uint8_t *)&meta.data);
4798            written_len += write(file_fd, data, total_size);
4799            total_size = meta.tuning_vfe_data_size;
4800            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
4801            written_len += write(file_fd, data, total_size);
4802            total_size = meta.tuning_cpp_data_size;
4803            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
4804            written_len += write(file_fd, data, total_size);
4805            total_size = meta.tuning_cac_data_size;
4806            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
4807            written_len += write(file_fd, data, total_size);
4808            close(file_fd);
4809        }else {
4810            ALOGE("%s: fail to open file for metadata dumping", __func__);
4811        }
4812    }
4813}
4814
4815/*===========================================================================
4816 * FUNCTION   : cleanAndSortStreamInfo
4817 *
4818 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
4819 *              and sort them such that raw stream is at the end of the list
4820 *              This is a workaround for camera daemon constraint.
4821 *
4822 * PARAMETERS : None
4823 *
4824 *==========================================================================*/
4825void QCamera3HardwareInterface::cleanAndSortStreamInfo()
4826{
4827    List<stream_info_t *> newStreamInfo;
4828
4829    /*clean up invalid streams*/
4830    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
4831            it != mStreamInfo.end();) {
4832        if(((*it)->status) == INVALID){
4833            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
4834            delete channel;
4835            free(*it);
4836            it = mStreamInfo.erase(it);
4837        } else {
4838            it++;
4839        }
4840    }
4841
4842    // Move preview/video/callback/snapshot streams into newList
4843    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4844            it != mStreamInfo.end();) {
4845        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
4846                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
4847                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
4848            newStreamInfo.push_back(*it);
4849            it = mStreamInfo.erase(it);
4850        } else
4851            it++;
4852    }
4853    // Move raw streams into newList
4854    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4855            it != mStreamInfo.end();) {
4856        newStreamInfo.push_back(*it);
4857        it = mStreamInfo.erase(it);
4858    }
4859
4860    mStreamInfo = newStreamInfo;
4861}
4862
4863/*===========================================================================
4864 * FUNCTION   : extractJpegMetadata
4865 *
4866 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
4867 *              JPEG metadata is cached in HAL, and return as part of capture
4868 *              result when metadata is returned from camera daemon.
4869 *
4870 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
4871 *              @request:      capture request
4872 *
4873 *==========================================================================*/
4874void QCamera3HardwareInterface::extractJpegMetadata(
4875        CameraMetadata& jpegMetadata,
4876        const camera3_capture_request_t *request)
4877{
4878    CameraMetadata frame_settings;
4879    frame_settings = request->settings;
4880
4881    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
4882        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
4883                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
4884                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
4885
4886    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
4887        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
4888                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
4889                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
4890
4891    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
4892        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
4893                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
4894                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
4895
4896    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
4897        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
4898                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
4899                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
4900
4901    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
4902        jpegMetadata.update(ANDROID_JPEG_QUALITY,
4903                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
4904                frame_settings.find(ANDROID_JPEG_QUALITY).count);
4905
4906    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
4907        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
4908                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
4909                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
4910
4911    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
4912        int32_t thumbnail_size[2];
4913        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
4914        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
4915        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
4916            int32_t orientation =
4917                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
4918            if ((orientation == 90) || (orientation == 270)) {
4919               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
4920               int32_t temp;
4921               temp = thumbnail_size[0];
4922               thumbnail_size[0] = thumbnail_size[1];
4923               thumbnail_size[1] = temp;
4924            }
4925         }
4926         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
4927                thumbnail_size,
4928                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
4929    }
4930
4931}
4932
4933/*===========================================================================
4934 * FUNCTION   : convertToRegions
4935 *
4936 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
4937 *
4938 * PARAMETERS :
4939 *   @rect   : cam_rect_t struct to convert
4940 *   @region : int32_t destination array
4941 *   @weight : if we are converting from cam_area_t, weight is valid
4942 *             else weight = -1
4943 *
4944 *==========================================================================*/
4945void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
4946        int32_t *region, int weight)
4947{
4948    region[0] = rect.left;
4949    region[1] = rect.top;
4950    region[2] = rect.left + rect.width;
4951    region[3] = rect.top + rect.height;
4952    if (weight > -1) {
4953        region[4] = weight;
4954    }
4955}
4956
4957/*===========================================================================
4958 * FUNCTION   : convertFromRegions
4959 *
4960 * DESCRIPTION: helper method to convert from array to cam_rect_t
4961 *
4962 * PARAMETERS :
 *   @roi      : cam_area_t to populate from the metadata entry
 *   @settings : capture request settings containing the region data
 *   @tag      : metadata tag whose 5-int array
 *               [xmin, ymin, xmax, ymax, weight] is converted
4967 *
4968 *==========================================================================*/
4969void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
4970        const camera_metadata_t *settings, uint32_t tag)
4971{
4972    CameraMetadata frame_settings;
4973    frame_settings = settings;
4974    int32_t x_min = frame_settings.find(tag).data.i32[0];
4975    int32_t y_min = frame_settings.find(tag).data.i32[1];
4976    int32_t x_max = frame_settings.find(tag).data.i32[2];
4977    int32_t y_max = frame_settings.find(tag).data.i32[3];
4978    roi.weight = frame_settings.find(tag).data.i32[4];
4979    roi.rect.left = x_min;
4980    roi.rect.top = y_min;
4981    roi.rect.width = x_max - x_min;
4982    roi.rect.height = y_max - y_min;
4983}
4984
4985/*===========================================================================
4986 * FUNCTION   : resetIfNeededROI
4987 *
4988 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
4989 *              crop region
4990 *
4991 * PARAMETERS :
4992 *   @roi       : cam_area_t struct to resize
4993 *   @scalerCropRegion : cam_crop_region_t region to compare against
4994 *
4995 *
4996 *==========================================================================*/
4997bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
4998                                                 const cam_crop_region_t* scalerCropRegion)
4999{
5000    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5001    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5002    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5003    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5004
5005    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5006     * without having this check the calculations below to validate if the roi
5007     * is inside scalar crop region will fail resulting in the roi not being
5008     * reset causing algorithm to continue to use stale roi window
5009     */
5010    if (roi->weight == 0) {
5011        return true;
5012    }
5013
5014    if ((roi_x_max < scalerCropRegion->left) ||
5015        // right edge of roi window is left of scalar crop's left edge
5016        (roi_y_max < scalerCropRegion->top)  ||
5017        // bottom edge of roi window is above scalar crop's top edge
5018        (roi->rect.left > crop_x_max) ||
5019        // left edge of roi window is beyond(right) of scalar crop's right edge
5020        (roi->rect.top > crop_y_max)){
5021        // top edge of roi windo is above scalar crop's top edge
5022        return false;
5023    }
5024    if (roi->rect.left < scalerCropRegion->left) {
5025        roi->rect.left = scalerCropRegion->left;
5026    }
5027    if (roi->rect.top < scalerCropRegion->top) {
5028        roi->rect.top = scalerCropRegion->top;
5029    }
5030    if (roi_x_max > crop_x_max) {
5031        roi_x_max = crop_x_max;
5032    }
5033    if (roi_y_max > crop_y_max) {
5034        roi_y_max = crop_y_max;
5035    }
5036    roi->rect.width = roi_x_max - roi->rect.left;
5037    roi->rect.height = roi_y_max - roi->rect.top;
5038    return true;
5039}
5040
5041/*===========================================================================
5042 * FUNCTION   : convertLandmarks
5043 *
5044 * DESCRIPTION: helper method to extract the landmarks from face detection info
5045 *
5046 * PARAMETERS :
5047 *   @face   : cam_rect_t struct to convert
5048 *   @landmarks : int32_t destination array
5049 *
5050 *
5051 *==========================================================================*/
5052void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5053{
5054    landmarks[0] = (int32_t)face.left_eye_center.x;
5055    landmarks[1] = (int32_t)face.left_eye_center.y;
5056    landmarks[2] = (int32_t)face.right_eye_center.x;
5057    landmarks[3] = (int32_t)face.right_eye_center.y;
5058    landmarks[4] = (int32_t)face.mouth_center.x;
5059    landmarks[5] = (int32_t)face.mouth_center.y;
5060}
5061
5062#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5063/*===========================================================================
5064 * FUNCTION   : initCapabilities
5065 *
5066 * DESCRIPTION: initialize camera capabilities in static data struct
5067 *
5068 * PARAMETERS :
5069 *   @cameraId  : camera Id
5070 *
5071 * RETURN     : int32_t type of status
5072 *              NO_ERROR  -- success
5073 *              none-zero failure code
5074 *==========================================================================*/
5075int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5076{
5077    int rc = 0;
5078    mm_camera_vtbl_t *cameraHandle = NULL;
5079    QCamera3HeapMemory *capabilityHeap = NULL;
5080
5081    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5082    if (rc || !cameraHandle) {
5083        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5084        goto open_failed;
5085    }
5086
5087    capabilityHeap = new QCamera3HeapMemory(1);
5088    if (capabilityHeap == NULL) {
5089        ALOGE("%s: creation of capabilityHeap failed", __func__);
5090        goto heap_creation_failed;
5091    }
5092    /* Allocate memory for capability buffer */
5093    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5094    if(rc != OK) {
5095        ALOGE("%s: No memory for cappability", __func__);
5096        goto allocate_failed;
5097    }
5098
5099    /* Map memory for capability buffer */
5100    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5101    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5102                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5103                                capabilityHeap->getFd(0),
5104                                sizeof(cam_capability_t));
5105    if(rc < 0) {
5106        ALOGE("%s: failed to map capability buffer", __func__);
5107        goto map_failed;
5108    }
5109
5110    /* Query Capability */
5111    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5112    if(rc < 0) {
5113        ALOGE("%s: failed to query capability",__func__);
5114        goto query_failed;
5115    }
5116    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5117    if (!gCamCapability[cameraId]) {
5118        ALOGE("%s: out of memory", __func__);
5119        goto query_failed;
5120    }
5121    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5122                                        sizeof(cam_capability_t));
5123    rc = 0;
5124
5125query_failed:
5126    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5127                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5128map_failed:
5129    capabilityHeap->deallocate();
5130allocate_failed:
5131    delete capabilityHeap;
5132heap_creation_failed:
5133    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5134    cameraHandle = NULL;
5135open_failed:
5136    return rc;
5137}
5138
5139/*==========================================================================
 * FUNCTION   : get3AVersion
5141 *
5142 * DESCRIPTION: get the Q3A S/W version
5143 *
5144 * PARAMETERS :
5145 *  @sw_version: Reference of Q3A structure which will hold version info upon
5146 *               return
5147 *
5148 * RETURN     : None
5149 *
5150 *==========================================================================*/
5151void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5152{
5153    if(gCamCapability[mCameraId])
5154        sw_version = gCamCapability[mCameraId]->q3a_version;
5155    else
5156        ALOGE("%s:Capability structure NULL!", __func__);
5157}
5158
5159
5160/*===========================================================================
5161 * FUNCTION   : initParameters
5162 *
5163 * DESCRIPTION: initialize camera parameters
5164 *
5165 * PARAMETERS :
5166 *
5167 * RETURN     : int32_t type of status
5168 *              NO_ERROR  -- success
5169 *              none-zero failure code
5170 *==========================================================================*/
5171int QCamera3HardwareInterface::initParameters()
5172{
5173    int rc = 0;
5174
5175    //Allocate Set Param Buffer
5176    mParamHeap = new QCamera3HeapMemory(1);
5177    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5178    if(rc != OK) {
5179        rc = NO_MEMORY;
5180        ALOGE("Failed to allocate SETPARM Heap memory");
5181        delete mParamHeap;
5182        mParamHeap = NULL;
5183        return rc;
5184    }
5185
5186    //Map memory for parameters buffer
5187    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5188            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5189            mParamHeap->getFd(0),
5190            sizeof(metadata_buffer_t));
5191    if(rc < 0) {
5192        ALOGE("%s:failed to map SETPARM buffer",__func__);
5193        rc = FAILED_TRANSACTION;
5194        mParamHeap->deallocate();
5195        delete mParamHeap;
5196        mParamHeap = NULL;
5197        return rc;
5198    }
5199
5200    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5201
5202    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5203    return rc;
5204}
5205
5206/*===========================================================================
5207 * FUNCTION   : deinitParameters
5208 *
5209 * DESCRIPTION: de-initialize camera parameters
5210 *
5211 * PARAMETERS :
5212 *
5213 * RETURN     : NONE
5214 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Reverse of initParameters(): unmap the parameter buffer from the
    // backend before releasing the heap that backs it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage, which is gone now.
    mParameters = NULL;

    free(mPrevParameters);
    mPrevParameters = NULL;
}
5229
5230/*===========================================================================
5231 * FUNCTION   : calcMaxJpegSize
5232 *
5233 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5234 *
5235 * PARAMETERS :
5236 *
5237 * RETURN     : max_jpeg_size
5238 *==========================================================================*/
5239size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5240{
5241    size_t max_jpeg_size = 0;
5242    size_t temp_width, temp_height;
5243    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5244            MAX_SIZES_CNT);
5245    for (size_t i = 0; i < count; i++) {
5246        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5247        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5248        if (temp_width * temp_height > max_jpeg_size ) {
5249            max_jpeg_size = temp_width * temp_height;
5250        }
5251    }
5252    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5253    return max_jpeg_size;
5254}
5255
5256/*===========================================================================
5257 * FUNCTION   : getMaxRawSize
5258 *
5259 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5260 *
5261 * PARAMETERS :
5262 *
5263 * RETURN     : Largest supported Raw Dimension
5264 *==========================================================================*/
5265cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5266{
5267    int max_width = 0;
5268    cam_dimension_t maxRawSize;
5269
5270    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5271    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5272        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5273            max_width = gCamCapability[camera_id]->raw_dim[i].width;
5274            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5275        }
5276    }
5277    return maxRawSize;
5278}
5279
5280
5281/*===========================================================================
5282 * FUNCTION   : calcMaxJpegDim
5283 *
5284 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5285 *
5286 * PARAMETERS :
5287 *
5288 * RETURN     : max_jpeg_dim
5289 *==========================================================================*/
5290cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5291{
5292    cam_dimension_t max_jpeg_dim;
5293    cam_dimension_t curr_jpeg_dim;
5294    max_jpeg_dim.width = 0;
5295    max_jpeg_dim.height = 0;
5296    curr_jpeg_dim.width = 0;
5297    curr_jpeg_dim.height = 0;
5298    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5299        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5300        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5301        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5302            max_jpeg_dim.width * max_jpeg_dim.height ) {
5303            max_jpeg_dim.width = curr_jpeg_dim.width;
5304            max_jpeg_dim.height = curr_jpeg_dim.height;
5305        }
5306    }
5307    return max_jpeg_dim;
5308}
5309
5310/*===========================================================================
5311 * FUNCTION   : addStreamConfig
5312 *
5313 * DESCRIPTION: adds the stream configuration to the array
5314 *
5315 * PARAMETERS :
5316 * @available_stream_configs : pointer to stream configuration array
5317 * @scalar_format            : scalar format
5318 * @dim                      : configuration dimension
5319 * @config_type              : input or output configuration type
5320 *
5321 * RETURN     : NONE
5322 *==========================================================================*/
5323void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5324        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5325{
5326    available_stream_configs.add(scalar_format);
5327    available_stream_configs.add(dim.width);
5328    available_stream_configs.add(dim.height);
5329    available_stream_configs.add(config_type);
5330}
5331
5332
5333/*===========================================================================
5334 * FUNCTION   : initStaticMetadata
5335 *
5336 * DESCRIPTION: initialize the static metadata
5337 *
5338 * PARAMETERS :
5339 *   @cameraId  : camera Id
5340 *
5341 * RETURN     : int32_t type of status
5342 *              0  -- success
5343 *              non-zero failure code
5344 *==========================================================================*/
5345int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5346{
5347    int rc = 0;
5348    CameraMetadata staticInfo;
5349    size_t count = 0;
5350    bool limitedDevice = false;
5351
5352    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5353     * guaranteed, its advertised as limited device */
5354    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5355            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5356
5357    uint8_t supportedHwLvl = limitedDevice ?
5358            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5359            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5360
5361    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5362            &supportedHwLvl, 1);
5363
5364    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5365    /*HAL 3 only*/
5366    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5367                    &gCamCapability[cameraId]->min_focus_distance, 1);
5368
5369    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5370                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5371
5372    /*should be using focal lengths but sensor doesn't provide that info now*/
5373    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5374                      &gCamCapability[cameraId]->focal_length,
5375                      1);
5376
5377    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5378                      gCamCapability[cameraId]->apertures,
5379                      gCamCapability[cameraId]->apertures_count);
5380
5381    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5382                gCamCapability[cameraId]->filter_densities,
5383                gCamCapability[cameraId]->filter_densities_count);
5384
5385
5386    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5387                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5388                      gCamCapability[cameraId]->optical_stab_modes_count);
5389
5390    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5391            gCamCapability[cameraId]->lens_shading_map_size.height};
5392    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5393                      lens_shading_map_size,
5394                      sizeof(lens_shading_map_size)/sizeof(int32_t));
5395
5396    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5397            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5398
5399    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5400            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5401
5402    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5403            &gCamCapability[cameraId]->max_frame_duration, 1);
5404
5405    camera_metadata_rational baseGainFactor = {
5406            gCamCapability[cameraId]->base_gain_factor.numerator,
5407            gCamCapability[cameraId]->base_gain_factor.denominator};
5408    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5409                      &baseGainFactor, 1);
5410
5411    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5412                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5413
5414    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5415            gCamCapability[cameraId]->pixel_array_size.height};
5416    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5417                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5418
5419    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5420                                                gCamCapability[cameraId]->active_array_size.top,
5421                                                gCamCapability[cameraId]->active_array_size.width,
5422                                                gCamCapability[cameraId]->active_array_size.height};
5423    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5424                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5425
5426    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5427            &gCamCapability[cameraId]->white_level, 1);
5428
5429    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5430            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5431
5432    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5433                      &gCamCapability[cameraId]->flash_charge_duration, 1);
5434
5435    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5436                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5437
5438    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
5439    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
5440                      (int32_t *)&maxFaces, 1);
5441
5442    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
5443    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5444            &timestampSource, 1);
5445
5446    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5447                      &gCamCapability[cameraId]->histogram_size, 1);
5448
5449    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5450            &gCamCapability[cameraId]->max_histogram_count, 1);
5451
5452    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5453            gCamCapability[cameraId]->sharpness_map_size.height};
5454
5455    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5456            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5457
5458    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5459            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5460
5461    int32_t scalar_formats[] = {
5462            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5463            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5464            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5465            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5466            HAL_PIXEL_FORMAT_RAW10,
5467            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5468    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5469    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5470                      scalar_formats,
5471                      scalar_formats_count);
5472
5473    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
5474    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5475    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
5476            count, MAX_SIZES_CNT, available_processed_sizes);
5477    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
5478            available_processed_sizes, count * 2);
5479
5480    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
5481    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
5482    makeTable(gCamCapability[cameraId]->raw_dim,
5483            count, MAX_SIZES_CNT, available_raw_sizes);
5484    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
5485            available_raw_sizes, count * 2);
5486
5487    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
5488    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
5489    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
5490            count, MAX_SIZES_CNT, available_fps_ranges);
5491    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5492            available_fps_ranges, count * 2);
5493
5494    camera_metadata_rational exposureCompensationStep = {
5495            gCamCapability[cameraId]->exp_compensation_step.numerator,
5496            gCamCapability[cameraId]->exp_compensation_step.denominator};
5497    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
5498                      &exposureCompensationStep, 1);
5499
5500    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
5501    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
5502                      availableVstabModes, sizeof(availableVstabModes));
5503
5504    /*HAL 1 and HAL 3 common*/
5505    float maxZoom = 4;
5506    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5507            &maxZoom, 1);
5508
5509    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
5510    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
5511
5512    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
5513    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
5514        max3aRegions[2] = 0; /* AF not supported */
5515    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
5516            max3aRegions, 3);
5517
5518    uint8_t availableFaceDetectModes[] = {
5519            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
5520            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
5521    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5522            availableFaceDetectModes,
5523            sizeof(availableFaceDetectModes)/sizeof(availableFaceDetectModes[0]));
5524
5525    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
5526                                           gCamCapability[cameraId]->exposure_compensation_max};
5527    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
5528            exposureCompensationRange,
5529            sizeof(exposureCompensationRange)/sizeof(int32_t));
5530
5531    uint8_t lensFacing = (facingBack) ?
5532            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
5533    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
5534
5535    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
5536                      available_thumbnail_sizes,
5537                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5538
5539    /*all sizes will be clubbed into this tag*/
5540    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
5541    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5542    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
5543            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
5544            gCamCapability[cameraId]->max_downscale_factor);
5545    /*android.scaler.availableStreamConfigurations*/
5546    size_t max_stream_configs_size = count * scalar_formats_count * 4;
5547    Vector<int32_t> available_stream_configs;
5548    cam_dimension_t active_array_dim;
5549    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
5550    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
5551    /* Add input/output stream configurations for each scalar formats*/
5552    for (size_t j = 0; j < scalar_formats_count; j++) {
5553        switch (scalar_formats[j]) {
5554        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5555        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5556        case HAL_PIXEL_FORMAT_RAW10:
5557            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5558                addStreamConfig(available_stream_configs, scalar_formats[j],
5559                        gCamCapability[cameraId]->raw_dim[i],
5560                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5561            }
5562            break;
5563        case HAL_PIXEL_FORMAT_BLOB:
5564            cam_dimension_t jpeg_size;
            // (Tail of the per-format switch begun above — this arm walks the
            // flattened JPEG size list, stored as (w0,h0,w1,h1,...) pairs,
            // hence the cnt/2 iteration. Presumably the BLOB/JPEG case; the
            // case label is above this view.)
            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
                jpeg_size.width  = available_jpeg_sizes[i*2];
                jpeg_size.height = available_jpeg_sizes[i*2+1];
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        jpeg_size,
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // Processed formats: one OUTPUT config per picture-size table
            // entry, tracking the largest advertised dimension as we go.
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* Book keep largest */
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            // Reprocess INPUT streams are only advertised at the single
            // largest picture size, for the two reprocessable formats.
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                 addStreamConfig(available_stream_configs, scalar_formats[j],
                         largest_picture_size,
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }
5599
    // Publish the (format, width, height, direction) stream configurations
    // accumulated by the loop above.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    /* android.scaler.availableMinFrameDurations */
    // Flattened (format, width, height, min_duration) quadruples. RAW
    // formats take dimensions/durations from the raw tables; every other
    // format uses the picture-size tables.
    int64_t available_min_durations[max_stream_configs_size];
    size_t idx = 0;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->raw_dim[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->raw_dim[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->raw_min_duration[i];
                idx+=4;
            }
            break;
        default:
            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->picture_min_duration[i];
                idx+=4;
            }
            break;
        }
    }
    // idx is the number of int64 entries written (multiple of 4).
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      &available_min_durations[0], idx);
5643
    // android.control.availableHighSpeedVideoConfigurations: translate each
    // backend HFR table entry into an fps value, then advertise qualifying
    // entries as (width, height, fps_min, fps_max, batch_size_max) tuples.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            // Unknown/OFF modes leave fps at 0 and are filtered out below.
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
             * [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
       }
    }
    //Advertise HFR capability only if the property is set
    // Defaults to enabled ("1"). hfrEnable is consulted again further below
    // when deciding whether to add CONSTRAINED_HIGH_SPEED_VIDEO to
    // ANDROID_REQUEST_AVAILABLE_CAPABILITIES.
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
5717
    // Largest JPEG blob the HAL may produce (computed by calcMaxJpegSize).
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Map backend effect enums to framework values; backend effects without
    // a framework mapping are silently skipped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);
5737
5738    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
5739    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
5740    size_t supported_scene_modes_cnt = 0;
5741    count = CAM_SCENE_MODE_MAX;
5742    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
5743    for (size_t i = 0; i < count; i++) {
5744        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
5745                CAM_SCENE_MODE_OFF) {
5746            int val = lookupFwkName(SCENE_MODES_MAP,
5747                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
5748                    gCamCapability[cameraId]->supported_scene_modes[i]);
5749            if (NAME_NOT_FOUND != val) {
5750                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
5751                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
5752                supported_scene_modes_cnt++;
5753            }
5754        }
5755    }
5756    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
5757                      avail_scene_modes,
5758                      supported_scene_modes_cnt);
5759
5760    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
5761    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
5762                      supported_scene_modes_cnt,
5763                      CAM_SCENE_MODE_MAX,
5764                      scene_mode_overrides,
5765                      supported_indexes,
5766                      cameraId);
5767
5768    if (supported_scene_modes_cnt == 0) {
5769        supported_scene_modes_cnt = 1;
5770        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
5771    }
5772
5773    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
5774            scene_mode_overrides, supported_scene_modes_cnt * 3);
5775
    // Top-level 3A control modes supported by this HAL.
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);

    // AE antibanding: translate supported backend modes to framework enums,
    // skipping unmapped ones.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    // Color aberration correction (CAC) modes. When the backend reports
    // none, OFF alone is advertised.
    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
    size = 0;
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        avail_abberation_modes[0] =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        size++;
    } else {
        for (size_t i = 0; i < count; i++) {
            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                    gCamCapability[cameraId]->aberration_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_abberation_modes[size] = (uint8_t)val;
                size++;
            } else {
                // NOTE(review): an unmappable backend mode aborts the loop,
                // dropping all remaining modes (not just the bad one) —
                // confirm this is intentional.
                ALOGE("%s: Invalid CAC mode %d", __func__,
                        gCamCapability[cameraId]->aberration_modes[i]);
                break;
            }
        }

    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);
5826
    // AF modes supported by the backend, mapped to framework enums.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // AWB modes supported by the backend, mapped to framework enums.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    // Flash firing power levels are copied through without translation.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    // flashAvailable is also tested below to decide whether the
    // flash-dependent AE modes get advertised.
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);
5878
    // AE modes: backend-supported modes plus the three flash-based modes
    // when a flash unit exists. NOTE(review): the truth test relies on
    // ANDROID_FLASH_INFO_AVAILABLE_TRUE being non-zero and _FALSE being 0 —
    // holds for the current enum values, but worth confirming.
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    // Sensor sensitivity (ISO) range: [min, max].
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // The sensor mount angle doubles as the reported sensor orientation.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // NOTE(review): the framework defines this tag's element order as
    // (raw, processed non-stalling, processed stalling); this array lists
    // (stalling, processed, raw). Harmless if the stalling and raw limits
    // are equal — confirm against MAX_*_STREAMS values.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No LEDs: the entry count is deliberately 0 (empty list).
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Lens focus-distance calibration quality; the tag is published only
    // when the backend value maps to a framework enum. Note 'val' is reused
    // later in this function for the reference-illuminant lookups.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }
5930
    // Sensor test pattern modes that have a framework mapping.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Worst-case number of frames a request can spend in the pipeline:
    // in-flight requests plus the empty-pipeline and frame-skip delays.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    // Number of partial result callbacks per capture result.
    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    // Maximum capture stall introduced by reprocess requests.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
5959
    // android.request.availableCapabilities: the baseline set is always
    // advertised; CONSTRAINED_HIGH_SPEED_VIDEO only when HFR is enabled and
    // at least one HFR config was built above; RAW only for non-YUV sensors.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
    //BURST_CAPTURE.
    // NOTE(review): despite the comment above, the code keys off the sensor
    // type (RAW sensor => lock available), not the capability list —
    // confirm which is intended.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has
    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
    // NOTE(review): same sensor-type-vs-capability mismatch as AE lock above.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // A single reprocess input stream is supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);
6000
    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    // IMPLEMENTATION_DEFINED and YCbCr_420_888 inputs can each be
    // reprocessed into BLOB (JPEG) or YCbCr_420_888 outputs.
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Sync latency: limited devices report the HAL constant, full devices
    // report per-frame control.
    int32_t max_latency = (limitedDevice) ?
            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    // Fixed mode lists advertised unconditionally by this HAL.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6062
    // Reference illuminants (published only when the backend value has a
    // framework mapping; 'val' is the variable declared earlier in this
    // function) and the DNG color/calibration matrices.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // The backend matrix arrays are cast to camera_metadata_rational_t;
    // the (void *) hop assumes the layouts are binary-compatible —
    // presumably each element is a (numerator, denominator) pair.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6100
    // android.request.availableRequestKeys: the fixed baseline set of keys
    // accepted in capture requests.
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    // AF regions are only advertised when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
6140
    // android.request.availableResultKeys: the fixed baseline set of keys
    // present in capture results.
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES};
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    // AF regions only when more than one focus mode exists.
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // Noise profile / green split are sensor (non-YUV) specific results.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
6178
    // android.request.availableCharacteristicsKeys: the set of tags present
    // in the static metadata published by this function.
    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
       ANDROID_SCALER_CROPPING_TYPE,
       ANDROID_SYNC_MAX_LATENCY,
       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS,
       ANDROID_CONTROL_AVAILABLE_MODES,
       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
       ANDROID_SHADING_AVAILABLE_MODES,
       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                      available_characteristics_keys,
                      sizeof(available_characteristics_keys)/sizeof(int32_t));

    /*available stall durations depend on the hw + sw and will be different for different devices */
    /*have to add for raw after implementation*/
6240    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6241    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6242
6243    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6244    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6245            MAX_SIZES_CNT);
6246    size_t available_stall_size = count * 4;
6247    int64_t available_stall_durations[available_stall_size];
6248    idx = 0;
6249    for (uint32_t j = 0; j < stall_formats_count; j++) {
6250       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6251          for (uint32_t i = 0; i < count; i++) {
6252             available_stall_durations[idx]   = stall_formats[j];
6253             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6254             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6255             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6256             idx+=4;
6257          }
6258       } else {
6259          for (uint32_t i = 0; i < raw_count; i++) {
6260             available_stall_durations[idx]   = stall_formats[j];
6261             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6262             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6263             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6264             idx+=4;
6265          }
6266       }
6267    }
6268    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6269                      available_stall_durations,
6270                      idx);
6271    //QCAMERA3_OPAQUE_RAW
6272    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6273    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6274    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6275    case LEGACY_RAW:
6276        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6277            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6278        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6279            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6280        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6281            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6282        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6283        break;
6284    case MIPI_RAW:
6285        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6286            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6287        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6288            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6289        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6290            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6291        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6292        break;
6293    default:
6294        ALOGE("%s: unknown opaque_raw_format %d", __func__,
6295                gCamCapability[cameraId]->opaque_raw_fmt);
6296        break;
6297    }
6298    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6299
6300    int32_t strides[3*raw_count];
6301    for (size_t i = 0; i < raw_count; i++) {
6302        cam_stream_buf_plane_info_t buf_planes;
6303        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6304        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6305        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6306            &gCamCapability[cameraId]->padding_info, &buf_planes);
6307        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6308    }
6309    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6310            3*raw_count);
6311
6312    gStaticMetadata[cameraId] = staticInfo.release();
6313    return rc;
6314}
6315
6316/*===========================================================================
6317 * FUNCTION   : makeTable
6318 *
6319 * DESCRIPTION: make a table of sizes
6320 *
6321 * PARAMETERS :
6322 *
6323 *
6324 *==========================================================================*/
6325void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6326        size_t max_size, int32_t *sizeTable)
6327{
6328    size_t j = 0;
6329    if (size > max_size) {
6330       size = max_size;
6331    }
6332    for (size_t i = 0; i < size; i++) {
6333        sizeTable[j] = dimTable[i].width;
6334        sizeTable[j+1] = dimTable[i].height;
6335        j+=2;
6336    }
6337}
6338
6339/*===========================================================================
6340 * FUNCTION   : makeFPSTable
6341 *
6342 * DESCRIPTION: make a table of fps ranges
6343 *
6344 * PARAMETERS :
6345 *
6346 *==========================================================================*/
6347void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6348        size_t max_size, int32_t *fpsRangesTable)
6349{
6350    size_t j = 0;
6351    if (size > max_size) {
6352       size = max_size;
6353    }
6354    for (size_t i = 0; i < size; i++) {
6355        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6356        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6357        j+=2;
6358    }
6359}
6360
6361/*===========================================================================
6362 * FUNCTION   : makeOverridesList
6363 *
6364 * DESCRIPTION: make a list of scene mode overrides
6365 *
6366 * PARAMETERS :
6367 *
6368 *
6369 *==========================================================================*/
6370void QCamera3HardwareInterface::makeOverridesList(
6371        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6372        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6373{
6374    /*daemon will give a list of overrides for all scene modes.
6375      However we should send the fwk only the overrides for the scene modes
6376      supported by the framework*/
6377    size_t j = 0;
6378    if (size > max_size) {
6379       size = max_size;
6380    }
6381    size_t focus_count = CAM_FOCUS_MODE_MAX;
6382    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6383            focus_count);
6384    for (size_t i = 0; i < size; i++) {
6385        bool supt = false;
6386        size_t index = supported_indexes[i];
6387        overridesList[j] = gCamCapability[camera_id]->flash_available ?
6388                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6389        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6390                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6391                overridesTable[index].awb_mode);
6392        if (NAME_NOT_FOUND != val) {
6393            overridesList[j+1] = (uint8_t)val;
6394        }
6395        uint8_t focus_override = overridesTable[index].af_mode;
6396        for (size_t k = 0; k < focus_count; k++) {
6397           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6398              supt = true;
6399              break;
6400           }
6401        }
6402        if (supt) {
6403            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6404                    focus_override);
6405            if (NAME_NOT_FOUND != val) {
6406                overridesList[j+2] = (uint8_t)val;
6407            }
6408        } else {
6409           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6410        }
6411        j+=3;
6412    }
6413}
6414
6415/*===========================================================================
6416 * FUNCTION   : filterJpegSizes
6417 *
6418 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6419 *              could be downscaled to
6420 *
6421 * PARAMETERS :
6422 *
6423 * RETURN     : length of jpegSizes array
6424 *==========================================================================*/
6425
6426size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6427        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6428        uint8_t downscale_factor)
6429{
6430    if (0 == downscale_factor) {
6431        downscale_factor = 1;
6432    }
6433
6434    int32_t min_width = active_array_size.width / downscale_factor;
6435    int32_t min_height = active_array_size.height / downscale_factor;
6436    size_t jpegSizesCnt = 0;
6437    if (processedSizesCnt > maxCount) {
6438        processedSizesCnt = maxCount;
6439    }
6440    for (size_t i = 0; i < processedSizesCnt; i+=2) {
6441        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6442            jpegSizes[jpegSizesCnt] = processedSizes[i];
6443            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6444            jpegSizesCnt += 2;
6445        }
6446    }
6447    return jpegSizesCnt;
6448}
6449
6450/*===========================================================================
6451 * FUNCTION   : getPreviewHalPixelFormat
6452 *
6453 * DESCRIPTION: convert the format to type recognized by framework
6454 *
6455 * PARAMETERS : format : the format from backend
6456 *
6457 ** RETURN    : format recognized by framework
6458 *
6459 *==========================================================================*/
6460int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6461{
6462    int32_t halPixelFormat;
6463
6464    switch (format) {
6465    case CAM_FORMAT_YUV_420_NV12:
6466        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6467        break;
6468    case CAM_FORMAT_YUV_420_NV21:
6469        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6470        break;
6471    case CAM_FORMAT_YUV_420_NV21_ADRENO:
6472        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6473        break;
6474    case CAM_FORMAT_YUV_420_YV12:
6475        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6476        break;
6477    case CAM_FORMAT_YUV_422_NV16:
6478    case CAM_FORMAT_YUV_422_NV61:
6479    default:
6480        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6481        break;
6482    }
6483    return halPixelFormat;
6484}
6485
6486/*===========================================================================
6487 * FUNCTION   : computeNoiseModelEntryS
6488 *
6489 * DESCRIPTION: function to map a given sensitivity to the S noise
6490 *              model parameters in the DNG noise model.
6491 *
6492 * PARAMETERS : sens : the sensor sensitivity
6493 *
 * RETURN     : S (sensor amplification) noise
6495 *
6496 *==========================================================================*/
6497double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6498    double s = gCamCapability[mCameraId]->gradient_S * sens +
6499            gCamCapability[mCameraId]->offset_S;
6500    return ((s < 0.0) ? 0.0 : s);
6501}
6502
6503/*===========================================================================
6504 * FUNCTION   : computeNoiseModelEntryO
6505 *
6506 * DESCRIPTION: function to map a given sensitivity to the O noise
6507 *              model parameters in the DNG noise model.
6508 *
6509 * PARAMETERS : sens : the sensor sensitivity
6510 *
 * RETURN     : O (sensor readout) noise
6512 *
6513 *==========================================================================*/
6514double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6515    double o = gCamCapability[mCameraId]->gradient_O * sens +
6516            gCamCapability[mCameraId]->offset_O;
6517    return ((o < 0.0) ? 0.0 : o);
6518}
6519
6520/*===========================================================================
6521 * FUNCTION   : getSensorSensitivity
6522 *
6523 * DESCRIPTION: convert iso_mode to an integer value
6524 *
6525 * PARAMETERS : iso_mode : the iso_mode supported by sensor
6526 *
 * RETURN     : sensitivity supported by sensor
6528 *
6529 *==========================================================================*/
6530int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6531{
6532    int32_t sensitivity;
6533
6534    switch (iso_mode) {
6535    case CAM_ISO_MODE_100:
6536        sensitivity = 100;
6537        break;
6538    case CAM_ISO_MODE_200:
6539        sensitivity = 200;
6540        break;
6541    case CAM_ISO_MODE_400:
6542        sensitivity = 400;
6543        break;
6544    case CAM_ISO_MODE_800:
6545        sensitivity = 800;
6546        break;
6547    case CAM_ISO_MODE_1600:
6548        sensitivity = 1600;
6549        break;
6550    default:
6551        sensitivity = -1;
6552        break;
6553    }
6554    return sensitivity;
6555}
6556
6557/*===========================================================================
6558 * FUNCTION   : getCamInfo
6559 *
6560 * DESCRIPTION: query camera capabilities
6561 *
6562 * PARAMETERS :
6563 *   @cameraId  : camera Id
6564 *   @info      : camera info struct to be filled in with camera capabilities
6565 *
6566 * RETURN     : int type of status
6567 *              NO_ERROR  -- success
6568 *              none-zero failure code
6569 *==========================================================================*/
6570int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
6571        struct camera_info *info)
6572{
6573    ATRACE_CALL();
6574    int rc = 0;
6575
6576    pthread_mutex_lock(&gCamLock);
6577    if (NULL == gCamCapability[cameraId]) {
6578        rc = initCapabilities(cameraId);
6579        if (rc < 0) {
6580            pthread_mutex_unlock(&gCamLock);
6581            return rc;
6582        }
6583    }
6584
6585    if (NULL == gStaticMetadata[cameraId]) {
6586        rc = initStaticMetadata(cameraId);
6587        if (rc < 0) {
6588            pthread_mutex_unlock(&gCamLock);
6589            return rc;
6590        }
6591    }
6592
6593    switch(gCamCapability[cameraId]->position) {
6594    case CAM_POSITION_BACK:
6595        info->facing = CAMERA_FACING_BACK;
6596        break;
6597
6598    case CAM_POSITION_FRONT:
6599        info->facing = CAMERA_FACING_FRONT;
6600        break;
6601
6602    default:
6603        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
6604        rc = -1;
6605        break;
6606    }
6607
6608
6609    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
6610    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
6611    info->static_camera_characteristics = gStaticMetadata[cameraId];
6612
6613    //For now assume both cameras can operate independently.
6614    info->conflicting_devices = NULL;
6615    info->conflicting_devices_length = 0;
6616
6617    //resource cost is 100 * MIN(1.0, m/M),
6618    //where m is throughput requirement with maximum stream configuration
6619    //and M is CPP maximum throughput.
6620    float max_fps = 0.0;
6621    for (uint32_t i = 0;
6622            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
6623        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
6624            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
6625    }
6626    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
6627            gCamCapability[cameraId]->active_array_size.width *
6628            gCamCapability[cameraId]->active_array_size.height * max_fps /
6629            gCamCapability[cameraId]->max_pixel_bandwidth;
6630    info->resource_cost = 100 * MIN(1.0, ratio);
6631    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
6632            info->resource_cost);
6633
6634    pthread_mutex_unlock(&gCamLock);
6635    return rc;
6636}
6637
6638/*===========================================================================
6639 * FUNCTION   : translateCapabilityToMetadata
6640 *
6641 * DESCRIPTION: translate the capability into camera_metadata_t
6642 *
6643 * PARAMETERS : type of the request
6644 *
6645 *
6646 * RETURN     : success: camera_metadata_t*
6647 *              failure: NULL
6648 *
6649 *==========================================================================*/
6650camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6651{
6652    if (mDefaultMetadata[type] != NULL) {
6653        return mDefaultMetadata[type];
6654    }
6655    //first time we are handling this request
6656    //fill up the metadata structure using the wrapper class
6657    CameraMetadata settings;
6658    //translate from cam_capability_t to camera_metadata_tag_t
6659    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6660    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6661    int32_t defaultRequestID = 0;
6662    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6663
6664    /* OIS disable */
6665    char ois_prop[PROPERTY_VALUE_MAX];
6666    memset(ois_prop, 0, sizeof(ois_prop));
6667    property_get("persist.camera.ois.disable", ois_prop, "0");
6668    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6669
6670    /* Force video to use OIS */
6671    char videoOisProp[PROPERTY_VALUE_MAX];
6672    memset(videoOisProp, 0, sizeof(videoOisProp));
6673    property_get("persist.camera.ois.video", videoOisProp, "1");
6674    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6675
6676    uint8_t controlIntent = 0;
6677    uint8_t focusMode;
6678    uint8_t vsMode;
6679    uint8_t optStabMode;
6680    uint8_t cacMode;
6681    uint8_t edge_mode;
6682    uint8_t noise_red_mode;
6683    uint8_t tonemap_mode;
6684    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6685    switch (type) {
6686      case CAMERA3_TEMPLATE_PREVIEW:
6687        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
6688        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6689        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6690        edge_mode = ANDROID_EDGE_MODE_FAST;
6691        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6692        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6693        break;
6694      case CAMERA3_TEMPLATE_STILL_CAPTURE:
6695        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
6696        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6697        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6698        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
6699        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
6700        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
6701        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
6702        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
6703        break;
6704      case CAMERA3_TEMPLATE_VIDEO_RECORD:
6705        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
6706        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6707        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6708        edge_mode = ANDROID_EDGE_MODE_FAST;
6709        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6710        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6711        if (forceVideoOis)
6712            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6713        break;
6714      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
6715        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
6716        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
6717        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6718        edge_mode = ANDROID_EDGE_MODE_FAST;
6719        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6720        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6721        if (forceVideoOis)
6722            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6723        break;
6724      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
6725        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
6726        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
6727        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6728        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
6729        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
6730        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6731        break;
6732      case CAMERA3_TEMPLATE_MANUAL:
6733        edge_mode = ANDROID_EDGE_MODE_FAST;
6734        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6735        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6736        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
6737        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6738        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6739        break;
6740      default:
6741        edge_mode = ANDROID_EDGE_MODE_FAST;
6742        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
6743        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
6744        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
6745        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6746        break;
6747    }
6748    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
6749    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
6750    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
6751        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
6752    }
6753    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
6754
6755    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6756            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
6757        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
6758    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
6759            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
6760            || ois_disable)
6761        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
6762    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
6763
6764    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6765            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
6766
6767    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
6768    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
6769
6770    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
6771    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
6772
6773    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
6774    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
6775
6776    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
6777    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
6778
6779    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
6780    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
6781
6782    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
6783    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
6784
6785    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
6786    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
6787
6788    /*flash*/
6789    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
6790    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
6791
6792    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
6793    settings.update(ANDROID_FLASH_FIRING_POWER,
6794            &flashFiringLevel, 1);
6795
6796    /* lens */
6797    float default_aperture = gCamCapability[mCameraId]->apertures[0];
6798    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
6799
6800    if (gCamCapability[mCameraId]->filter_densities_count) {
6801        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
6802        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
6803                        gCamCapability[mCameraId]->filter_densities_count);
6804    }
6805
6806    float default_focal_length = gCamCapability[mCameraId]->focal_length;
6807    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
6808
6809    float default_focus_distance = 0;
6810    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
6811
6812    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
6813    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
6814
6815    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
6816    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
6817
6818    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
6819    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
6820
6821    /* face detection (default to OFF) */
6822    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
6823    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
6824
6825    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
6826    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
6827
6828    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
6829    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
6830
6831    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6832    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6833
6834    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
6835    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
6836
6837    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
6838    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
6839
6840    /* Exposure time(Update the Min Exposure Time)*/
6841    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
6842    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
6843
6844    /* frame duration */
6845    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
6846    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
6847
6848    /* sensitivity */
6849    static const int32_t default_sensitivity = 100;
6850    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
6851
6852    /*edge mode*/
6853    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
6854
6855    /*noise reduction mode*/
6856    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
6857
6858    /*color correction mode*/
6859    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
6860    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
6861
6862    /*transform matrix mode*/
6863    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
6864
6865    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
6866    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
6867
6868    int32_t scaler_crop_region[4];
6869    scaler_crop_region[0] = 0;
6870    scaler_crop_region[1] = 0;
6871    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
6872    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
6873    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
6874
6875    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
6876    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
6877
6878    /*focus distance*/
6879    float focus_distance = 0.0;
6880    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
6881
6882    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
6883    float max_range = 0.0;
6884    float max_fixed_fps = 0.0;
6885    int32_t fps_range[2] = {0, 0};
6886    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
6887            i++) {
6888        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
6889            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6890        if (type == CAMERA3_TEMPLATE_PREVIEW ||
6891                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
6892                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
6893            if (range > max_range) {
6894                fps_range[0] =
6895                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6896                fps_range[1] =
6897                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6898                max_range = range;
6899            }
6900        } else {
6901            if (range < 0.01 && max_fixed_fps <
6902                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
6903                fps_range[0] =
6904                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
6905                fps_range[1] =
6906                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6907                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
6908            }
6909        }
6910    }
6911    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
6912
6913    /*precapture trigger*/
6914    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
6915    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
6916
6917    /*af trigger*/
6918    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
6919    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
6920
6921    /* ae & af regions */
6922    int32_t active_region[] = {
6923            gCamCapability[mCameraId]->active_array_size.left,
6924            gCamCapability[mCameraId]->active_array_size.top,
6925            gCamCapability[mCameraId]->active_array_size.left +
6926                    gCamCapability[mCameraId]->active_array_size.width,
6927            gCamCapability[mCameraId]->active_array_size.top +
6928                    gCamCapability[mCameraId]->active_array_size.height,
6929            0};
6930    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
6931            sizeof(active_region) / sizeof(active_region[0]));
6932    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
6933            sizeof(active_region) / sizeof(active_region[0]));
6934
6935    /* black level lock */
6936    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
6937    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
6938
6939    /* lens shading map mode */
6940    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
6941    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
6942        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
6943    }
6944    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
6945
6946    //special defaults for manual template
6947    if (type == CAMERA3_TEMPLATE_MANUAL) {
6948        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
6949        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
6950
6951        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
6952        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
6953
6954        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
6955        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
6956
6957        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
6958        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
6959
6960        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
6961        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
6962
6963        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
6964        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
6965    }
6966
6967    /* TNR default */
6968    uint8_t tnr_enable       = m_bTnrEnabled;
6969    int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
6970    settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6971    settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6972    CDBG("%s: default TNR enable %d, process plate %d", __func__, tnr_enable, tnr_process_type);
6973
6974    /* CDS default */
6975    char prop[PROPERTY_VALUE_MAX];
6976    memset(prop, 0, sizeof(prop));
6977    property_get("persist.camera.CDS", prop, "Auto");
6978    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
6979    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
6980    if (CAM_CDS_MODE_MAX == cds_mode) {
6981        cds_mode = CAM_CDS_MODE_AUTO;
6982    }
6983    //@note: force cds mode to be OFF when TNR is enabled.
6984    if (m_bTnrEnabled == true) {
6985        CDBG_HIGH("%s: default CDS mode %d is forced to be OFF because TNR is enabled.",
6986                __func__, cds_mode);
6987        cds_mode = CAM_CDS_MODE_OFF;
6988    }
6989    int32_t mode = cds_mode;
6990    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
6991    mDefaultMetadata[type] = settings.release();
6992
6993    return mDefaultMetadata[type];
6994}
6995
6996/*===========================================================================
6997 * FUNCTION   : setFrameParameters
6998 *
6999 * DESCRIPTION: set parameters per frame as requested in the metadata from
7000 *              framework
7001 *
7002 * PARAMETERS :
7003 *   @request   : request that needs to be serviced
7004 *   @streamID : Stream ID of all the requested streams
7005 *   @blob_request: Whether this request is a blob request or not
7006 *
7007 * RETURN     : success: NO_ERROR
7008 *              failure:
7009 *==========================================================================*/
7010int QCamera3HardwareInterface::setFrameParameters(
7011                    camera3_capture_request_t *request,
7012                    cam_stream_ID_t streamID,
7013                    int blob_request,
7014                    uint32_t snapshotStreamId)
7015{
7016    /*translate from camera_metadata_t type to parm_type_t*/
7017    int rc = 0;
7018    int32_t hal_version = CAM_HAL_V3;
7019
7020    clear_metadata_buffer(mParameters);
7021    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7022        ALOGE("%s: Failed to set hal version in the parameters", __func__);
7023        return BAD_VALUE;
7024    }
7025
7026    /*we need to update the frame number in the parameters*/
7027    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7028            request->frame_number)) {
7029        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7030        return BAD_VALUE;
7031    }
7032
7033    /* Update stream id of all the requested buffers */
7034    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7035        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7036        return BAD_VALUE;
7037    }
7038
7039    if (mUpdateDebugLevel) {
7040        uint32_t dummyDebugLevel = 0;
7041        /* The value of dummyDebugLevel is irrelavent. On
7042         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7043        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7044                dummyDebugLevel)) {
7045            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7046            return BAD_VALUE;
7047        }
7048        mUpdateDebugLevel = false;
7049    }
7050
7051    if(request->settings != NULL){
7052        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7053        if (blob_request)
7054            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7055    }
7056
7057    return rc;
7058}
7059
7060/*===========================================================================
7061 * FUNCTION   : setReprocParameters
7062 *
7063 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7064 *              return it.
7065 *
7066 * PARAMETERS :
7067 *   @request   : request that needs to be serviced
7068 *
7069 * RETURN     : success: NO_ERROR
7070 *              failure:
7071 *==========================================================================*/
7072int32_t QCamera3HardwareInterface::setReprocParameters(
7073        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7074        uint32_t snapshotStreamId)
7075{
7076    /*translate from camera_metadata_t type to parm_type_t*/
7077    int rc = 0;
7078
7079    if (NULL == request->settings){
7080        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7081        return BAD_VALUE;
7082    }
7083
7084    if (NULL == reprocParam) {
7085        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7086        return BAD_VALUE;
7087    }
7088    clear_metadata_buffer(reprocParam);
7089
7090    /*we need to update the frame number in the parameters*/
7091    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7092            request->frame_number)) {
7093        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7094        return BAD_VALUE;
7095    }
7096
7097    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7098    if (rc < 0) {
7099        ALOGE("%s: Failed to translate reproc request", __func__);
7100        return rc;
7101    }
7102
7103    CameraMetadata frame_settings;
7104    frame_settings = request->settings;
7105    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7106            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7107        int32_t *crop_count =
7108                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7109        int32_t *crop_data =
7110                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7111        int32_t *roi_map =
7112                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7113        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7114            cam_crop_data_t crop_meta;
7115            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7116            crop_meta.num_of_streams = 1;
7117            crop_meta.crop_info[0].crop.left   = crop_data[0];
7118            crop_meta.crop_info[0].crop.top    = crop_data[1];
7119            crop_meta.crop_info[0].crop.width  = crop_data[2];
7120            crop_meta.crop_info[0].crop.height = crop_data[3];
7121
7122            crop_meta.crop_info[0].roi_map.left =
7123                    roi_map[0];
7124            crop_meta.crop_info[0].roi_map.top =
7125                    roi_map[1];
7126            crop_meta.crop_info[0].roi_map.width =
7127                    roi_map[2];
7128            crop_meta.crop_info[0].roi_map.height =
7129                    roi_map[3];
7130
7131            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7132                rc = BAD_VALUE;
7133            }
7134            CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7135                    __func__,
7136                    request->input_buffer->stream,
7137                    crop_meta.crop_info[0].crop.left,
7138                    crop_meta.crop_info[0].crop.top,
7139                    crop_meta.crop_info[0].crop.width,
7140                    crop_meta.crop_info[0].crop.height);
7141            CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7142                    __func__,
7143                    request->input_buffer->stream,
7144                    crop_meta.crop_info[0].roi_map.left,
7145                    crop_meta.crop_info[0].roi_map.top,
7146                    crop_meta.crop_info[0].roi_map.width,
7147                    crop_meta.crop_info[0].roi_map.height);
7148            } else {
7149                ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7150            }
7151    } else {
7152        ALOGE("%s: No crop data from matching output stream", __func__);
7153    }
7154
7155    return rc;
7156}
7157
7158/*===========================================================================
7159 * FUNCTION   : setHalFpsRange
7160 *
7161 * DESCRIPTION: set FPS range parameter
7162 *
7163 *
7164 * PARAMETERS :
7165 *   @settings    : Metadata from framework
7166 *   @hal_metadata: Metadata buffer
7167 *
7168 *
7169 * RETURN     : success: NO_ERROR
7170 *              failure:
7171 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): no exists() check here -- assumes the caller only invokes
    // this after verifying ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // 'settings' (find() on an absent tag returns a NULL data pointer) --
    // confirm against callers.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the video stream runs at the same range as the AE target.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Constrained high-speed session: pin the whole range (and the video
        // range) to the requested max fps, per the table above.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size = sensor fps / preview fps, clamped to the
                // largest batch the backend supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally publish the (possibly HFR-adjusted) fps range to the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
7265
7266/*===========================================================================
7267 * FUNCTION   : translateToHalMetadata
7268 *
7269 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7270 *
7271 *
7272 * PARAMETERS :
7273 *   @request  : request sent from framework
7274 *
7275 *
7276 * RETURN     : success: NO_ERROR
7277 *              failure:
7278 *==========================================================================*/
7279int QCamera3HardwareInterface::translateToHalMetadata
7280                                  (const camera3_capture_request_t *request,
7281                                   metadata_buffer_t *hal_metadata,
7282                                   uint32_t snapshotStreamId)
7283{
7284    int rc = 0;
7285    CameraMetadata frame_settings;
7286    frame_settings = request->settings;
7287
7288    /* Do not change the order of the following list unless you know what you are
7289     * doing.
7290     * The order is laid out in such a way that parameters in the front of the table
7291     * may be used to override the parameters later in the table. Examples are:
7292     * 1. META_MODE should precede AEC/AWB/AF MODE
7293     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7294     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
7295     * 4. Any mode should precede it's corresponding settings
7296     */
7297    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7298        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7299        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7300            rc = BAD_VALUE;
7301        }
7302        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7303        if (rc != NO_ERROR) {
7304            ALOGE("%s: extractSceneMode failed", __func__);
7305        }
7306    }
7307
7308    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7309        uint8_t fwk_aeMode =
7310            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7311        uint8_t aeMode;
7312        int32_t redeye;
7313
7314        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
7315            aeMode = CAM_AE_MODE_OFF;
7316        } else {
7317            aeMode = CAM_AE_MODE_ON;
7318        }
7319        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
7320            redeye = 1;
7321        } else {
7322            redeye = 0;
7323        }
7324
7325        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7326                fwk_aeMode);
7327        if (NAME_NOT_FOUND != val) {
7328            int32_t flashMode = (int32_t)val;
7329            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
7330        }
7331
7332        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
7333        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
7334            rc = BAD_VALUE;
7335        }
7336    }
7337
7338    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
7339        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
7340        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7341                fwk_whiteLevel);
7342        if (NAME_NOT_FOUND != val) {
7343            uint8_t whiteLevel = (uint8_t)val;
7344            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
7345                rc = BAD_VALUE;
7346            }
7347        }
7348    }
7349
7350    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
7351        uint8_t fwk_cacMode =
7352                frame_settings.find(
7353                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
7354        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7355                fwk_cacMode);
7356        if (NAME_NOT_FOUND != val) {
7357            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
7358            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
7359                rc = BAD_VALUE;
7360            }
7361        } else {
7362            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
7363        }
7364    }
7365
7366    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
7367        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
7368        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7369                fwk_focusMode);
7370        if (NAME_NOT_FOUND != val) {
7371            uint8_t focusMode = (uint8_t)val;
7372            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
7373                rc = BAD_VALUE;
7374            }
7375        }
7376    }
7377
7378    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
7379        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
7380        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
7381                focalDistance)) {
7382            rc = BAD_VALUE;
7383        }
7384    }
7385
7386    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
7387        uint8_t fwk_antibandingMode =
7388                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
7389        int val = lookupHalName(ANTIBANDING_MODES_MAP,
7390                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
7391        if (NAME_NOT_FOUND != val) {
7392            uint32_t hal_antibandingMode = (uint32_t)val;
7393            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
7394                    hal_antibandingMode)) {
7395                rc = BAD_VALUE;
7396            }
7397        }
7398    }
7399
7400    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
7401        int32_t expCompensation = frame_settings.find(
7402                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
7403        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
7404            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
7405        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7406            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7407        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7408                expCompensation)) {
7409            rc = BAD_VALUE;
7410        }
7411    }
7412
7413    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7414        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7415        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7416            rc = BAD_VALUE;
7417        }
7418    }
7419    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7420        rc = setHalFpsRange(frame_settings, hal_metadata);
7421        if (rc != NO_ERROR) {
7422            ALOGE("%s: setHalFpsRange failed", __func__);
7423        }
7424    }
7425
7426    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7427        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7428        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7429            rc = BAD_VALUE;
7430        }
7431    }
7432
7433    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7434        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7435        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7436                fwk_effectMode);
7437        if (NAME_NOT_FOUND != val) {
7438            uint8_t effectMode = (uint8_t)val;
7439            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
7440                rc = BAD_VALUE;
7441            }
7442        }
7443    }
7444
7445    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7446        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7447        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
7448                colorCorrectMode)) {
7449            rc = BAD_VALUE;
7450        }
7451    }
7452
7453    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7454        cam_color_correct_gains_t colorCorrectGains;
7455        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7456            colorCorrectGains.gains[i] =
7457                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7458        }
7459        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
7460                colorCorrectGains)) {
7461            rc = BAD_VALUE;
7462        }
7463    }
7464
7465    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7466        cam_color_correct_matrix_t colorCorrectTransform;
7467        cam_rational_type_t transform_elem;
7468        size_t num = 0;
7469        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7470           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7471              transform_elem.numerator =
7472                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7473              transform_elem.denominator =
7474                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7475              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7476              num++;
7477           }
7478        }
7479        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7480                colorCorrectTransform)) {
7481            rc = BAD_VALUE;
7482        }
7483    }
7484
7485    cam_trigger_t aecTrigger;
7486    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7487    aecTrigger.trigger_id = -1;
7488    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7489        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7490        aecTrigger.trigger =
7491            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7492        aecTrigger.trigger_id =
7493            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7494        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7495                aecTrigger)) {
7496            rc = BAD_VALUE;
7497        }
7498        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7499                aecTrigger.trigger, aecTrigger.trigger_id);
7500    }
7501
7502    /*af_trigger must come with a trigger id*/
7503    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7504        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7505        cam_trigger_t af_trigger;
7506        af_trigger.trigger =
7507            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7508        af_trigger.trigger_id =
7509            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7510        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7511            rc = BAD_VALUE;
7512        }
7513        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7514                af_trigger.trigger, af_trigger.trigger_id);
7515    }
7516
7517    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7518        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7519        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
7520            rc = BAD_VALUE;
7521        }
7522    }
7523
7524    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7525        cam_edge_application_t edge_application;
7526        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7527        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7528            edge_application.sharpness = 0;
7529        } else {
7530            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
7531                uint8_t edgeStrength = frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
7532                edge_application.sharpness = (int32_t)edgeStrength;
7533            } else {
7534                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7535            }
7536        }
7537        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7538            rc = BAD_VALUE;
7539        }
7540    }
7541
7542    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7543        int32_t respectFlashMode = 1;
7544        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7545            uint8_t fwk_aeMode =
7546                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7547            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7548                respectFlashMode = 0;
7549                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7550                    __func__);
7551            }
7552        }
7553        if (respectFlashMode) {
7554            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7555                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7556            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7557            // To check: CAM_INTF_META_FLASH_MODE usage
7558            if (NAME_NOT_FOUND != val) {
7559                uint8_t flashMode = (uint8_t)val;
7560                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
7561                    rc = BAD_VALUE;
7562                }
7563            }
7564        }
7565    }
7566
7567    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7568        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7569        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
7570            rc = BAD_VALUE;
7571        }
7572    }
7573
7574    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7575        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
7576        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
7577                flashFiringTime)) {
7578            rc = BAD_VALUE;
7579        }
7580    }
7581
7582    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
7583        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
7584        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
7585                hotPixelMode)) {
7586            rc = BAD_VALUE;
7587        }
7588    }
7589
7590    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
7591        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
7592        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
7593                lensAperture)) {
7594            rc = BAD_VALUE;
7595        }
7596    }
7597
7598    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
7599        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
7600        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
7601                filterDensity)) {
7602            rc = BAD_VALUE;
7603        }
7604    }
7605
7606    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
7607        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
7608        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH, focalLength)) {
7609            rc = BAD_VALUE;
7610        }
7611    }
7612
7613    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
7614        uint8_t optStabMode =
7615                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
7616        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE, optStabMode)) {
7617            rc = BAD_VALUE;
7618        }
7619    }
7620
7621    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
7622        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
7623        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
7624                noiseRedMode)) {
7625            rc = BAD_VALUE;
7626        }
7627    }
7628
7629    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
7630        uint8_t noiseRedStrength =
7631                frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
7632        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
7633                noiseRedStrength)) {
7634            rc = BAD_VALUE;
7635        }
7636    }
7637
7638    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
7639        float reprocessEffectiveExposureFactor =
7640            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
7641        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
7642                reprocessEffectiveExposureFactor)) {
7643            rc = BAD_VALUE;
7644        }
7645    }
7646
7647    cam_crop_region_t scalerCropRegion;
7648    bool scalerCropSet = false;
7649    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
7650        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
7651        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
7652        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
7653        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
7654
7655        // Map coordinate system from active array to sensor output.
7656        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
7657                scalerCropRegion.width, scalerCropRegion.height);
7658
7659        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
7660                scalerCropRegion)) {
7661            rc = BAD_VALUE;
7662        }
7663        scalerCropSet = true;
7664    }
7665
7666    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
7667        int64_t sensorExpTime =
7668                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
7669        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
7670        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
7671                sensorExpTime)) {
7672            rc = BAD_VALUE;
7673        }
7674    }
7675
7676    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
7677        int64_t sensorFrameDuration =
7678                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
7679        int64_t minFrameDuration = getMinFrameDuration(request);
7680        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
7681        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
7682            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
7683        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
7684        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
7685                sensorFrameDuration)) {
7686            rc = BAD_VALUE;
7687        }
7688    }
7689
7690    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
7691        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
7692        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
7693                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
7694        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
7695                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
7696        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
7697        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
7698                sensorSensitivity)) {
7699            rc = BAD_VALUE;
7700        }
7701    }
7702
7703    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
7704        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
7705        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
7706            rc = BAD_VALUE;
7707        }
7708    }
7709
7710    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
7711        uint8_t shadingStrength = frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
7712        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
7713                shadingStrength)) {
7714            rc = BAD_VALUE;
7715        }
7716    }
7717
7718    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
7719        uint8_t fwk_facedetectMode =
7720                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
7721
7722        fwk_facedetectMode = (m_overrideAppFaceDetection < 0) ?
7723                                    fwk_facedetectMode : (uint8_t)m_overrideAppFaceDetection;
7724
7725        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7726                fwk_facedetectMode);
7727
7728        if (NAME_NOT_FOUND != val) {
7729            uint8_t facedetectMode = (uint8_t)val;
7730            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
7731                    facedetectMode)) {
7732                rc = BAD_VALUE;
7733            }
7734        }
7735    }
7736
7737    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
7738        uint8_t histogramMode =
7739                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
7740        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
7741                histogramMode)) {
7742            rc = BAD_VALUE;
7743        }
7744    }
7745
7746    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
7747        uint8_t sharpnessMapMode =
7748                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
7749        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
7750                sharpnessMapMode)) {
7751            rc = BAD_VALUE;
7752        }
7753    }
7754
7755    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
7756        uint8_t tonemapMode =
7757                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
7758        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
7759            rc = BAD_VALUE;
7760        }
7761    }
7762    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
7763    /*All tonemap channels will have the same number of points*/
7764    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
7765        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
7766        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
7767        cam_rgb_tonemap_curves tonemapCurves;
7768        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
7769        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7770            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
7771                    __func__, tonemapCurves.tonemap_points_cnt,
7772                    CAM_MAX_TONEMAP_CURVE_SIZE);
7773            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7774        }
7775
7776        /* ch0 = G*/
7777        size_t point = 0;
7778        cam_tonemap_curve_t tonemapCurveGreen;
7779        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7780            for (size_t j = 0; j < 2; j++) {
7781               tonemapCurveGreen.tonemap_points[i][j] =
7782                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
7783               point++;
7784            }
7785        }
7786        tonemapCurves.curves[0] = tonemapCurveGreen;
7787
7788        /* ch 1 = B */
7789        point = 0;
7790        cam_tonemap_curve_t tonemapCurveBlue;
7791        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7792            for (size_t j = 0; j < 2; j++) {
7793               tonemapCurveBlue.tonemap_points[i][j] =
7794                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
7795               point++;
7796            }
7797        }
7798        tonemapCurves.curves[1] = tonemapCurveBlue;
7799
7800        /* ch 2 = R */
7801        point = 0;
7802        cam_tonemap_curve_t tonemapCurveRed;
7803        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
7804            for (size_t j = 0; j < 2; j++) {
7805               tonemapCurveRed.tonemap_points[i][j] =
7806                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
7807               point++;
7808            }
7809        }
7810        tonemapCurves.curves[2] = tonemapCurveRed;
7811
7812        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
7813                tonemapCurves)) {
7814            rc = BAD_VALUE;
7815        }
7816    }
7817
7818    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
7819        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
7820        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
7821                captureIntent)) {
7822            rc = BAD_VALUE;
7823        }
7824    }
7825
7826    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
7827        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
7828        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
7829                blackLevelLock)) {
7830            rc = BAD_VALUE;
7831        }
7832    }
7833
7834    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
7835        uint8_t lensShadingMapMode =
7836                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
7837        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
7838                lensShadingMapMode)) {
7839            rc = BAD_VALUE;
7840        }
7841    }
7842
7843    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
7844        cam_area_t roi;
7845        bool reset = true;
7846        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
7847
7848        // Map coordinate system from active array to sensor output.
7849        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
7850                roi.rect.height);
7851
7852        if (scalerCropSet) {
7853            reset = resetIfNeededROI(&roi, &scalerCropRegion);
7854        }
7855        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
7856            rc = BAD_VALUE;
7857        }
7858    }
7859
7860    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
7861        cam_area_t roi;
7862        bool reset = true;
7863        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
7864
7865        // Map coordinate system from active array to sensor output.
7866        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
7867                roi.rect.height);
7868
7869        if (scalerCropSet) {
7870            reset = resetIfNeededROI(&roi, &scalerCropRegion);
7871        }
7872        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
7873            rc = BAD_VALUE;
7874        }
7875    }
7876
7877    // CDS for non-HFR mode
7878    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
7879            frame_settings.exists(QCAMERA3_CDS_MODE)) {
7880        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
7881        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
7882            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
7883        } else {
7884            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
7885                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
7886                rc = BAD_VALUE;
7887            }
7888        }
7889    }
7890
7891    // TNR
7892    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
7893        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
7894        cam_denoise_param_t tnr;
7895        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
7896        tnr.process_plates =
7897            (cam_denoise_process_type_t)frame_settings.find(
7898            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
7899
7900        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
7901            rc = BAD_VALUE;
7902        }
7903    }
7904
7905    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
7906        int32_t fwk_testPatternMode =
7907                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
7908        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
7909                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
7910
7911        if (NAME_NOT_FOUND != testPatternMode) {
7912            cam_test_pattern_data_t testPatternData;
7913            memset(&testPatternData, 0, sizeof(testPatternData));
7914            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
7915            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
7916                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
7917                int32_t *fwk_testPatternData =
7918                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
7919                testPatternData.r = fwk_testPatternData[0];
7920                testPatternData.b = fwk_testPatternData[3];
7921                switch (gCamCapability[mCameraId]->color_arrangement) {
7922                    case CAM_FILTER_ARRANGEMENT_RGGB:
7923                    case CAM_FILTER_ARRANGEMENT_GRBG:
7924                        testPatternData.gr = fwk_testPatternData[1];
7925                        testPatternData.gb = fwk_testPatternData[2];
7926                        break;
7927                    case CAM_FILTER_ARRANGEMENT_GBRG:
7928                    case CAM_FILTER_ARRANGEMENT_BGGR:
7929                        testPatternData.gr = fwk_testPatternData[2];
7930                        testPatternData.gb = fwk_testPatternData[1];
7931                        break;
7932                    default:
7933                        ALOGE("%s: color arrangement %d is not supported", __func__,
7934                                gCamCapability[mCameraId]->color_arrangement);
7935                        break;
7936                }
7937            }
7938            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
7939                    testPatternData)) {
7940                rc = BAD_VALUE;
7941            }
7942        } else {
7943            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
7944                    fwk_testPatternMode);
7945        }
7946    }
7947
7948    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
7949        size_t count = 0;
7950        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
7951        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
7952                gps_coords.data.d, gps_coords.count, count);
7953        if (gps_coords.count != count) {
7954            rc = BAD_VALUE;
7955        }
7956    }
7957
7958    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
7959        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
7960        size_t count = 0;
7961        const char *gps_methods_src = (const char *)
7962                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
7963        memset(gps_methods, '\0', sizeof(gps_methods));
7964        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
7965        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
7966                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
7967        if (GPS_PROCESSING_METHOD_SIZE != count) {
7968            rc = BAD_VALUE;
7969        }
7970    }
7971
7972    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
7973        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
7974        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
7975                gps_timestamp)) {
7976            rc = BAD_VALUE;
7977        }
7978    }
7979
7980    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7981        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
7982        cam_rotation_info_t rotation_info;
7983        if (orientation == 0) {
7984           rotation_info.rotation = ROTATE_0;
7985        } else if (orientation == 90) {
7986           rotation_info.rotation = ROTATE_90;
7987        } else if (orientation == 180) {
7988           rotation_info.rotation = ROTATE_180;
7989        } else if (orientation == 270) {
7990           rotation_info.rotation = ROTATE_270;
7991        }
7992        rotation_info.streamId = snapshotStreamId;
7993        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
7994        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
7995            rc = BAD_VALUE;
7996        }
7997    }
7998
7999    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8000        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8001        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8002            rc = BAD_VALUE;
8003        }
8004    }
8005
8006    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8007        uint32_t thumb_quality = (uint32_t)
8008                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8009        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8010                thumb_quality)) {
8011            rc = BAD_VALUE;
8012        }
8013    }
8014
8015    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8016        cam_dimension_t dim;
8017        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8018        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8019        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8020            rc = BAD_VALUE;
8021        }
8022    }
8023
8024    // Internal metadata
8025    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8026        size_t count = 0;
8027        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8028        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8029                privatedata.data.i32, privatedata.count, count);
8030        if (privatedata.count != count) {
8031            rc = BAD_VALUE;
8032        }
8033    }
8034
8035    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8036        uint8_t* use_av_timer =
8037                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8038        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8039            rc = BAD_VALUE;
8040        }
8041    }
8042
8043    // EV step
8044    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8045            gCamCapability[mCameraId]->exp_compensation_step)) {
8046        rc = BAD_VALUE;
8047    }
8048
8049    // CDS info
8050    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8051        cam_cds_data_t *cdsData = (cam_cds_data_t *)
8052                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8053
8054        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8055                CAM_INTF_META_CDS_DATA, *cdsData)) {
8056            rc = BAD_VALUE;
8057        }
8058    }
8059
8060    return rc;
8061}
8062
8063/*===========================================================================
8064 * FUNCTION   : captureResultCb
8065 *
8066 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8067 *
8068 * PARAMETERS :
8069 *   @frame  : frame information from mm-camera-interface
8070 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8071 *   @userdata: userdata
8072 *
8073 * RETURN     : NONE
8074 *==========================================================================*/
8075void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8076                camera3_stream_buffer_t *buffer,
8077                uint32_t frame_number, void *userdata)
8078{
8079    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8080    if (hw == NULL) {
8081        ALOGE("%s: Invalid hw %p", __func__, hw);
8082        return;
8083    }
8084
8085    hw->captureResultCb(metadata, buffer, frame_number);
8086    return;
8087}
8088
8089
8090/*===========================================================================
8091 * FUNCTION   : initialize
8092 *
8093 * DESCRIPTION: Pass framework callback pointers to HAL
8094 *
8095 * PARAMETERS :
8096 *
8097 *
8098 * RETURN     : Success : 0
8099 *              Failure: -ENODEV
8100 *==========================================================================*/
8101
8102int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8103                                  const camera3_callback_ops_t *callback_ops)
8104{
8105    CDBG("%s: E", __func__);
8106    QCamera3HardwareInterface *hw =
8107        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8108    if (!hw) {
8109        ALOGE("%s: NULL camera device", __func__);
8110        return -ENODEV;
8111    }
8112
8113    int rc = hw->initialize(callback_ops);
8114    CDBG("%s: X", __func__);
8115    return rc;
8116}
8117
8118/*===========================================================================
8119 * FUNCTION   : configure_streams
8120 *
8121 * DESCRIPTION: Framework entry point; forwards the stream list to the HAL
8122 *
8123 * PARAMETERS :
8124 *
8125 *
8126 * RETURN     : Success: 0
8127 *              Failure: -EINVAL (if stream configuration is invalid)
8128 *                       -ENODEV (fatal error)
8129 *==========================================================================*/
8130
8131int QCamera3HardwareInterface::configure_streams(
8132        const struct camera3_device *device,
8133        camera3_stream_configuration_t *stream_list)
8134{
8135    CDBG("%s: E", __func__);
8136    QCamera3HardwareInterface *hw =
8137        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8138    if (!hw) {
8139        ALOGE("%s: NULL camera device", __func__);
8140        return -ENODEV;
8141    }
8142    int rc = hw->configureStreams(stream_list);
8143    CDBG("%s: X", __func__);
8144    return rc;
8145}
8146
8147/*===========================================================================
8148 * FUNCTION   : construct_default_request_settings
8149 *
8150 * DESCRIPTION: Configure a settings buffer to meet the required use case
8151 *
8152 * PARAMETERS :
8153 *
8154 *
8155 * RETURN     : Success: Return valid metadata
8156 *              Failure: Return NULL
8157 *==========================================================================*/
8158const camera_metadata_t* QCamera3HardwareInterface::
8159    construct_default_request_settings(const struct camera3_device *device,
8160                                        int type)
8161{
8162
8163    CDBG("%s: E", __func__);
8164    camera_metadata_t* fwk_metadata = NULL;
8165    QCamera3HardwareInterface *hw =
8166        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8167    if (!hw) {
8168        ALOGE("%s: NULL camera device", __func__);
8169        return NULL;
8170    }
8171
8172    fwk_metadata = hw->translateCapabilityToMetadata(type);
8173
8174    CDBG("%s: X", __func__);
8175    return fwk_metadata;
8176}
8177
8178/*===========================================================================
8179 * FUNCTION   : process_capture_request
8180 *
8181 * DESCRIPTION: Framework entry point; forwards the capture request to the HAL
8182 *
8183 * PARAMETERS :
8184 *
8185 *
8186 * RETURN     :
8187 *==========================================================================*/
8188int QCamera3HardwareInterface::process_capture_request(
8189                    const struct camera3_device *device,
8190                    camera3_capture_request_t *request)
8191{
8192    CDBG("%s: E", __func__);
8193    QCamera3HardwareInterface *hw =
8194        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8195    if (!hw) {
8196        ALOGE("%s: NULL camera device", __func__);
8197        return -EINVAL;
8198    }
8199
8200    int rc = hw->processCaptureRequest(request);
8201    CDBG("%s: X", __func__);
8202    return rc;
8203}
8204
8205/*===========================================================================
8206 * FUNCTION   : dump
8207 *
8208 * DESCRIPTION: Framework entry point; dumps HAL state to the given fd
8209 *
8210 * PARAMETERS :
8211 *
8212 *
8213 * RETURN     :
8214 *==========================================================================*/
8215
8216void QCamera3HardwareInterface::dump(
8217                const struct camera3_device *device, int fd)
8218{
8219    /* Log level property is read when "adb shell dumpsys media.camera" is
8220       called so that the log level can be controlled without restarting
8221       the media server */
8222    getLogLevel();
8223
8224    CDBG("%s: E", __func__);
8225    QCamera3HardwareInterface *hw =
8226        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8227    if (!hw) {
8228        ALOGE("%s: NULL camera device", __func__);
8229        return;
8230    }
8231
8232    hw->dump(fd);
8233    CDBG("%s: X", __func__);
8234    return;
8235}
8236
8237/*===========================================================================
8238 * FUNCTION   : flush
8239 *
8240 * DESCRIPTION: Framework entry point; flushes all in-flight requests
8241 *
8242 * PARAMETERS :
8243 *
8244 *
8245 * RETURN     :
8246 *==========================================================================*/
8247
8248int QCamera3HardwareInterface::flush(
8249                const struct camera3_device *device)
8250{
8251    int rc;
8252    CDBG("%s: E", __func__);
8253    QCamera3HardwareInterface *hw =
8254        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8255    if (!hw) {
8256        ALOGE("%s: NULL camera device", __func__);
8257        return -EINVAL;
8258    }
8259
8260    rc = hw->flush();
8261    CDBG("%s: X", __func__);
8262    return rc;
8263}
8264
8265/*===========================================================================
8266 * FUNCTION   : close_camera_device
8267 *
8268 * DESCRIPTION: Framework entry point; destroys the HAL instance for this device
8269 *
8270 * PARAMETERS :
8271 *
8272 *
8273 * RETURN     :
8274 *==========================================================================*/
8275int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8276{
8277    CDBG("%s: E", __func__);
8278    int ret = NO_ERROR;
8279    QCamera3HardwareInterface *hw =
8280        reinterpret_cast<QCamera3HardwareInterface *>(
8281            reinterpret_cast<camera3_device_t *>(device)->priv);
8282    if (!hw) {
8283        ALOGE("NULL camera device");
8284        return BAD_VALUE;
8285    }
8286    delete hw;
8287
8288    CDBG("%s: X", __func__);
8289    return ret;
8290}
8291
8292/*===========================================================================
8293 * FUNCTION   : getWaveletDenoiseProcessPlate
8294 *
8295 * DESCRIPTION: query wavelet denoise process plate
8296 *
8297 * PARAMETERS : None
8298 *
8299 * RETURN     : WNR process plate value
8300 *==========================================================================*/
8301cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8302{
8303    char prop[PROPERTY_VALUE_MAX];
8304    memset(prop, 0, sizeof(prop));
8305    property_get("persist.denoise.process.plates", prop, "0");
8306    int processPlate = atoi(prop);
8307    switch(processPlate) {
8308    case 0:
8309        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8310    case 1:
8311        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8312    case 2:
8313        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8314    case 3:
8315        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8316    default:
8317        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8318    }
8319}
8320
8321
8322/*===========================================================================
8323 * FUNCTION   : getTemporalDenoiseProcessPlate
8324 *
8325 * DESCRIPTION: query temporal denoise process plate
8326 *
8327 * PARAMETERS : None
8328 *
8329 * RETURN     : TNR process plate value
8330 *==========================================================================*/
8331cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
8332{
8333    char prop[PROPERTY_VALUE_MAX];
8334    memset(prop, 0, sizeof(prop));
8335    property_get("persist.tnr.process.plates", prop, "0");
8336    int processPlate = atoi(prop);
8337    switch(processPlate) {
8338    case 0:
8339        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8340    case 1:
8341        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8342    case 2:
8343        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8344    case 3:
8345        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8346    default:
8347        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8348    }
8349}
8350
8351
8352/*===========================================================================
8353 * FUNCTION   : extractSceneMode
8354 *
8355 * DESCRIPTION: Extract scene mode from frameworks set metadata
8356 *
8357 * PARAMETERS :
8358 *      @frame_settings: CameraMetadata reference
8359 *      @metaMode: ANDROID_CONTROL_MODE
8360 *      @hal_metadata: hal metadata structure
8361 *
8362 * RETURN     : None
8363 *==========================================================================*/
8364int32_t QCamera3HardwareInterface::extractSceneMode(
8365        const CameraMetadata &frame_settings, uint8_t metaMode,
8366        metadata_buffer_t *hal_metadata)
8367{
8368    int32_t rc = NO_ERROR;
8369
8370    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
8371        camera_metadata_ro_entry entry =
8372                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
8373        if (0 == entry.count)
8374            return rc;
8375
8376        uint8_t fwk_sceneMode = entry.data.u8[0];
8377
8378        int val = lookupHalName(SCENE_MODES_MAP,
8379                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
8380                fwk_sceneMode);
8381        if (NAME_NOT_FOUND != val) {
8382            uint8_t sceneMode = (uint8_t)val;
8383            CDBG("%s: sceneMode: %d", __func__, sceneMode);
8384            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8385                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8386                rc = BAD_VALUE;
8387            }
8388        }
8389    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
8390            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
8391        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
8392        CDBG("%s: sceneMode: %d", __func__, sceneMode);
8393        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8394                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8395            rc = BAD_VALUE;
8396        }
8397    }
8398    return rc;
8399}
8400
8401/*===========================================================================
8402 * FUNCTION   : needRotationReprocess
8403 *
8404 * DESCRIPTION: if rotation needs to be done by reprocess in pp
8405 *
8406 * PARAMETERS : none
8407 *
8408 * RETURN     : true: needed
8409 *              false: no need
8410 *==========================================================================*/
8411bool QCamera3HardwareInterface::needRotationReprocess()
8412{
8413    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
8414        // current rotation is not zero, and pp has the capability to process rotation
8415        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
8416        return true;
8417    }
8418
8419    return false;
8420}
8421
8422/*===========================================================================
8423 * FUNCTION   : needReprocess
8424 *
8425 * DESCRIPTION: if reprocess in needed
8426 *
8427 * PARAMETERS : none
8428 *
8429 * RETURN     : true: needed
8430 *              false: no need
8431 *==========================================================================*/
8432bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
8433{
8434    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
8435        // TODO: add for ZSL HDR later
8436        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
8437        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
8438            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
8439            return true;
8440        } else {
8441            CDBG_HIGH("%s: already post processed frame", __func__);
8442            return false;
8443        }
8444    }
8445    return needRotationReprocess();
8446}
8447
8448/*===========================================================================
8449 * FUNCTION   : needJpegRotation
8450 *
8451 * DESCRIPTION: if rotation from jpeg is needed
8452 *
8453 * PARAMETERS : none
8454 *
8455 * RETURN     : true: needed
8456 *              false: no need
8457 *==========================================================================*/
8458bool QCamera3HardwareInterface::needJpegRotation()
8459{
8460   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
8461    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
8462       CDBG("%s: Need Jpeg to do the rotation", __func__);
8463       return true;
8464    }
8465    return false;
8466}
8467
8468/*===========================================================================
8469 * FUNCTION   : addOfflineReprocChannel
8470 *
8471 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
8472 *              coming from input channel
8473 *
8474 * PARAMETERS :
8475 *   @config  : reprocess configuration
8476 *   @inputChHandle : pointer to the input (source) channel
8477 *
8478 *
8479 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8480 *==========================================================================*/
8481QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
8482        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
8483{
8484    int32_t rc = NO_ERROR;
8485    QCamera3ReprocessChannel *pChannel = NULL;
8486
8487    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
8488            mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
8489    if (NULL == pChannel) {
8490        ALOGE("%s: no mem for reprocess channel", __func__);
8491        return NULL;
8492    }
8493
8494    rc = pChannel->initialize(IS_TYPE_NONE);
8495    if (rc != NO_ERROR) {
8496        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
8497        delete pChannel;
8498        return NULL;
8499    }
8500
8501    // pp feature config
8502    cam_pp_feature_config_t pp_config;
8503    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
8504
8505    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
8506
8507    rc = pChannel->addReprocStreamsFromSource(pp_config,
8508            config,
8509            IS_TYPE_NONE,
8510            mMetadataChannel);
8511
8512    if (rc != NO_ERROR) {
8513        delete pChannel;
8514        return NULL;
8515    }
8516    return pChannel;
8517}
8518
8519/*===========================================================================
8520 * FUNCTION   : getMobicatMask
8521 *
8522 * DESCRIPTION: returns mobicat mask
8523 *
8524 * PARAMETERS : none
8525 *
8526 * RETURN     : mobicat mask
8527 *
8528 *==========================================================================*/
8529uint8_t QCamera3HardwareInterface::getMobicatMask()
8530{
8531    return m_MobicatMask;
8532}
8533
8534/*===========================================================================
8535 * FUNCTION   : setMobicat
8536 *
8537 * DESCRIPTION: set Mobicat on/off.
8538 *
8539 * PARAMETERS :
8540 *   @params  : none
8541 *
8542 * RETURN     : int32_t type of status
8543 *              NO_ERROR  -- success
8544 *              none-zero failure code
8545 *==========================================================================*/
8546int32_t QCamera3HardwareInterface::setMobicat()
8547{
8548    char value [PROPERTY_VALUE_MAX];
8549    property_get("persist.camera.mobicat", value, "0");
8550    int32_t ret = NO_ERROR;
8551    uint8_t enableMobi = (uint8_t)atoi(value);
8552
8553    if (enableMobi) {
8554        tune_cmd_t tune_cmd;
8555        tune_cmd.type = SET_RELOAD_CHROMATIX;
8556        tune_cmd.module = MODULE_ALL;
8557        tune_cmd.value = TRUE;
8558        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8559                CAM_INTF_PARM_SET_VFE_COMMAND,
8560                tune_cmd);
8561
8562        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8563                CAM_INTF_PARM_SET_PP_COMMAND,
8564                tune_cmd);
8565    }
8566    m_MobicatMask = enableMobi;
8567
8568    return ret;
8569}
8570
8571/*===========================================================================
8572* FUNCTION   : getLogLevel
8573*
8574* DESCRIPTION: Reads the log level property into a variable
8575*
8576* PARAMETERS :
8577*   None
8578*
8579* RETURN     :
8580*   None
8581*==========================================================================*/
8582void QCamera3HardwareInterface::getLogLevel()
8583{
8584    char prop[PROPERTY_VALUE_MAX];
8585    uint32_t globalLogLevel = 0;
8586
8587    property_get("persist.camera.hal.debug", prop, "0");
8588    int val = atoi(prop);
8589    if (0 <= val) {
8590        gCamHal3LogLevel = (uint32_t)val;
8591    }
8592    property_get("persist.camera.global.debug", prop, "0");
8593    val = atoi(prop);
8594    if (0 <= val) {
8595        globalLogLevel = (uint32_t)val;
8596    }
8597
8598    /* Highest log level among hal.logs and global.logs is selected */
8599    if (gCamHal3LogLevel < globalLogLevel)
8600        gCamHal3LogLevel = globalLogLevel;
8601
8602    return;
8603}
8604
8605/*===========================================================================
8606 * FUNCTION   : validateStreamRotations
8607 *
8608 * DESCRIPTION: Check if the rotations requested are supported
8609 *
8610 * PARAMETERS :
8611 *   @stream_list : streams to be configured
8612 *
8613 * RETURN     : NO_ERROR on success
8614 *              -EINVAL on failure
8615 *
8616 *==========================================================================*/
8617int QCamera3HardwareInterface::validateStreamRotations(
8618        camera3_stream_configuration_t *streamList)
8619{
8620    int rc = NO_ERROR;
8621
8622    /*
8623    * Loop through all streams requested in configuration
8624    * Check if unsupported rotations have been requested on any of them
8625    */
8626    for (size_t j = 0; j < streamList->num_streams; j++){
8627        camera3_stream_t *newStream = streamList->streams[j];
8628
8629        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
8630        bool isImplDef = (newStream->format ==
8631                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
8632        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
8633                isImplDef);
8634
8635        if (isRotated && (!isImplDef || isZsl)) {
8636            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
8637                    "type:%d and stream format:%d", __func__,
8638                    newStream->rotation, newStream->stream_type,
8639                    newStream->format);
8640            rc = -EINVAL;
8641            break;
8642        }
8643    }
8644    return rc;
8645}
8646
8647/*===========================================================================
8648* FUNCTION   : getFlashInfo
8649*
8650* DESCRIPTION: Retrieve information about whether the device has a flash.
8651*
8652* PARAMETERS :
8653*   @cameraId  : Camera id to query
8654*   @hasFlash  : Boolean indicating whether there is a flash device
8655*                associated with given camera
8656*   @flashNode : If a flash device exists, this will be its device node.
8657*
8658* RETURN     :
8659*   None
8660*==========================================================================*/
8661void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
8662        bool& hasFlash,
8663        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
8664{
8665    cam_capability_t* camCapability = gCamCapability[cameraId];
8666    if (NULL == camCapability) {
8667        hasFlash = false;
8668        flashNode[0] = '\0';
8669    } else {
8670        hasFlash = camCapability->flash_available;
8671        strlcpy(flashNode,
8672                (char*)camCapability->flash_dev_name,
8673                QCAMERA_MAX_FILEPATH_LENGTH);
8674    }
8675}
8676
8677/*===========================================================================
8678* FUNCTION   : getEepromVersionInfo
8679*
8680* DESCRIPTION: Retrieve version info of the sensor EEPROM data
8681*
8682* PARAMETERS : None
8683*
8684* RETURN     : string describing EEPROM version
8685*              "\0" if no such info available
8686*==========================================================================*/
8687const char *QCamera3HardwareInterface::getEepromVersionInfo()
8688{
8689    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
8690}
8691
8692/*===========================================================================
8693* FUNCTION   : getLdafCalib
8694*
8695* DESCRIPTION: Retrieve Laser AF calibration data
8696*
8697* PARAMETERS : None
8698*
8699* RETURN     : Two uint32_t describing laser AF calibration data
8700*              NULL if none is available.
8701*==========================================================================*/
8702const uint32_t *QCamera3HardwareInterface::getLdafCalib()
8703{
8704    if (mLdafCalibExist) {
8705        return &mLdafCalib[0];
8706    } else {
8707        return NULL;
8708    }
8709}
8710
8711/*===========================================================================
8712 * FUNCTION   : dynamicUpdateMetaStreamInfo
8713 *
8714 * DESCRIPTION: This function:
8715 *             (1) stops all the channels
8716 *             (2) returns error on pending requests and buffers
8717 *             (3) sends metastream_info in setparams
8718 *             (4) starts all channels
8719 *             This is useful when sensor has to be restarted to apply any
8720 *             settings such as frame rate from a different sensor mode
8721 *
8722 * PARAMETERS : None
8723 *
8724 * RETURN     : NO_ERROR on success
8725 *              Error codes on failure
8726 *
8727 *==========================================================================*/
8728int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
8729{
8730    ATRACE_CALL();
8731    int rc = NO_ERROR;
8732
8733    CDBG("%s: E", __func__);
8734
8735    rc = stopAllChannels();
8736    if (rc < 0) {
8737        ALOGE("%s: stopAllChannels failed", __func__);
8738        return rc;
8739    }
8740
8741    rc = notifyErrorForPendingRequests();
8742    if (rc < 0) {
8743        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
8744        return rc;
8745    }
8746
8747    /* Send meta stream info once again so that ISP can start */
8748    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8749            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
8750    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
8751    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
8752            mParameters);
8753    if (rc < 0) {
8754        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
8755                __func__);
8756    }
8757
8758    rc = startAllChannels();
8759    if (rc < 0) {
8760        ALOGE("%s: startAllChannels failed", __func__);
8761        return rc;
8762    }
8763
8764    CDBG("%s:%d X", __func__, __LINE__);
8765    return rc;
8766}
8767
8768/*===========================================================================
8769 * FUNCTION   : stopAllChannels
8770 *
8771 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
8772 *
8773 * PARAMETERS : None
8774 *
8775 * RETURN     : NO_ERROR on success
8776 *              Error codes on failure
8777 *
8778 *==========================================================================*/
8779int32_t QCamera3HardwareInterface::stopAllChannels()
8780{
8781    int32_t rc = NO_ERROR;
8782
8783    // Stop the Streams/Channels
8784    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8785        it != mStreamInfo.end(); it++) {
8786        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
8787        channel->stop();
8788        (*it)->status = INVALID;
8789    }
8790
8791    if (mSupportChannel) {
8792        mSupportChannel->stop();
8793    }
8794    if (mAnalysisChannel) {
8795        mAnalysisChannel->stop();
8796    }
8797    if (mRawDumpChannel) {
8798        mRawDumpChannel->stop();
8799    }
8800    if (mMetadataChannel) {
8801        /* If content of mStreamInfo is not 0, there is metadata stream */
8802        mMetadataChannel->stop();
8803    }
8804
8805    CDBG("%s:%d All channels stopped", __func__, __LINE__);
8806    return rc;
8807}
8808
8809/*===========================================================================
8810 * FUNCTION   : startAllChannels
8811 *
8812 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
8813 *
8814 * PARAMETERS : None
8815 *
8816 * RETURN     : NO_ERROR on success
8817 *              Error codes on failure
8818 *
8819 *==========================================================================*/
8820int32_t QCamera3HardwareInterface::startAllChannels()
8821{
8822    int32_t rc = NO_ERROR;
8823
8824    CDBG("%s: Start all channels ", __func__);
8825    // Start the Streams/Channels
8826    if (mMetadataChannel) {
8827        /* If content of mStreamInfo is not 0, there is metadata stream */
8828        rc = mMetadataChannel->start();
8829        if (rc < 0) {
8830            ALOGE("%s: META channel start failed", __func__);
8831            return rc;
8832        }
8833    }
8834    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8835        it != mStreamInfo.end(); it++) {
8836        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
8837        rc = channel->start();
8838        if (rc < 0) {
8839            ALOGE("%s: channel start failed", __func__);
8840            return rc;
8841        }
8842    }
8843    if (mAnalysisChannel) {
8844        mAnalysisChannel->start();
8845    }
8846    if (mSupportChannel) {
8847        rc = mSupportChannel->start();
8848        if (rc < 0) {
8849            ALOGE("%s: Support channel start failed", __func__);
8850            return rc;
8851        }
8852    }
8853    if (mRawDumpChannel) {
8854        rc = mRawDumpChannel->start();
8855        if (rc < 0) {
8856            ALOGE("%s: RAW dump channel start failed", __func__);
8857            return rc;
8858        }
8859    }
8860
8861    CDBG("%s:%d All channels started", __func__, __LINE__);
8862    return rc;
8863}
8864
8865/*===========================================================================
8866 * FUNCTION   : notifyErrorForPendingRequests
8867 *
8868 * DESCRIPTION: This function sends error for all the pending requests/buffers
8869 *
8870 * PARAMETERS : None
8871 *
8872 * RETURN     : Error codes
8873 *              NO_ERROR on success
8874 *
8875 *==========================================================================*/
8876int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
8877{
8878    int32_t rc = NO_ERROR;
8879    unsigned int frameNum = 0;
8880    camera3_capture_result_t result;
8881    camera3_stream_buffer_t *pStream_Buf = NULL;
8882    FlushMap flushMap;
8883
8884    memset(&result, 0, sizeof(camera3_capture_result_t));
8885
8886    if (mPendingRequestsList.size() > 0) {
8887        pendingRequestIterator i = mPendingRequestsList.begin();
8888        frameNum = i->frame_number;
8889    } else {
8890        /* There might still be pending buffers even though there are
8891         no pending requests. Setting the frameNum to MAX so that
8892         all the buffers with smaller frame numbers are returned */
8893        frameNum = UINT_MAX;
8894    }
8895
8896    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
8897      __func__, frameNum);
8898
8899    // Go through the pending buffers and group them depending
8900    // on frame number
8901    for (List<PendingBufferInfo>::iterator k =
8902            mPendingBuffersMap.mPendingBufferList.begin();
8903            k != mPendingBuffersMap.mPendingBufferList.end();) {
8904
8905        if (k->frame_number < frameNum) {
8906            ssize_t idx = flushMap.indexOfKey(k->frame_number);
8907            if (idx == NAME_NOT_FOUND) {
8908                Vector<PendingBufferInfo> pending;
8909                pending.add(*k);
8910                flushMap.add(k->frame_number, pending);
8911            } else {
8912                Vector<PendingBufferInfo> &pending =
8913                        flushMap.editValueFor(k->frame_number);
8914                pending.add(*k);
8915            }
8916
8917            mPendingBuffersMap.num_buffers--;
8918            k = mPendingBuffersMap.mPendingBufferList.erase(k);
8919        } else {
8920            k++;
8921        }
8922    }
8923
8924    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
8925        uint32_t frame_number = flushMap.keyAt(iFlush);
8926        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
8927
8928        // Send Error notify to frameworks for each buffer for which
8929        // metadata buffer is already sent
8930        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
8931          __func__, frame_number, pending.size());
8932
8933        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
8934        if (NULL == pStream_Buf) {
8935            ALOGE("%s: No memory for pending buffers array", __func__);
8936            return NO_MEMORY;
8937        }
8938        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
8939
8940        for (size_t j = 0; j < pending.size(); j++) {
8941            const PendingBufferInfo &info = pending.itemAt(j);
8942            camera3_notify_msg_t notify_msg;
8943            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
8944            notify_msg.type = CAMERA3_MSG_ERROR;
8945            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
8946            notify_msg.message.error.error_stream = info.stream;
8947            notify_msg.message.error.frame_number = frame_number;
8948            pStream_Buf[j].acquire_fence = -1;
8949            pStream_Buf[j].release_fence = -1;
8950            pStream_Buf[j].buffer = info.buffer;
8951            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
8952            pStream_Buf[j].stream = info.stream;
8953            mCallbackOps->notify(mCallbackOps, &notify_msg);
8954            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
8955                    frame_number, info.stream);
8956        }
8957
8958        result.result = NULL;
8959        result.frame_number = frame_number;
8960        result.num_output_buffers = (uint32_t)pending.size();
8961        result.output_buffers = pStream_Buf;
8962        mCallbackOps->process_capture_result(mCallbackOps, &result);
8963
8964        delete [] pStream_Buf;
8965    }
8966
8967    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);
8968
8969    flushMap.clear();
8970    for (List<PendingBufferInfo>::iterator k =
8971            mPendingBuffersMap.mPendingBufferList.begin();
8972            k != mPendingBuffersMap.mPendingBufferList.end();) {
8973        ssize_t idx = flushMap.indexOfKey(k->frame_number);
8974        if (idx == NAME_NOT_FOUND) {
8975            Vector<PendingBufferInfo> pending;
8976            pending.add(*k);
8977            flushMap.add(k->frame_number, pending);
8978        } else {
8979            Vector<PendingBufferInfo> &pending =
8980                    flushMap.editValueFor(k->frame_number);
8981            pending.add(*k);
8982        }
8983
8984        mPendingBuffersMap.num_buffers--;
8985        k = mPendingBuffersMap.mPendingBufferList.erase(k);
8986    }
8987
8988    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
8989
8990    // Go through the pending requests info and send error request to framework
8991    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
8992        uint32_t frame_number = flushMap.keyAt(iFlush);
8993        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
8994        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
8995              __func__, frame_number);
8996
8997        // Send shutter notify to frameworks
8998        camera3_notify_msg_t notify_msg;
8999        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
9000        notify_msg.type = CAMERA3_MSG_ERROR;
9001        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
9002        notify_msg.message.error.error_stream = NULL;
9003        notify_msg.message.error.frame_number = frame_number;
9004        mCallbackOps->notify(mCallbackOps, &notify_msg);
9005
9006        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
9007        if (NULL == pStream_Buf) {
9008            ALOGE("%s: No memory for pending buffers array", __func__);
9009            return NO_MEMORY;
9010        }
9011        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
9012
9013        for (size_t j = 0; j < pending.size(); j++) {
9014            const PendingBufferInfo &info = pending.itemAt(j);
9015            pStream_Buf[j].acquire_fence = -1;
9016            pStream_Buf[j].release_fence = -1;
9017            pStream_Buf[j].buffer = info.buffer;
9018            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
9019            pStream_Buf[j].stream = info.stream;
9020        }
9021
9022        result.input_buffer = i->input_buffer;
9023        result.num_output_buffers = (uint32_t)pending.size();
9024        result.output_buffers = pStream_Buf;
9025        result.result = NULL;
9026        result.frame_number = frame_number;
9027        mCallbackOps->process_capture_result(mCallbackOps, &result);
9028        delete [] pStream_Buf;
9029        i = erasePendingRequest(i);
9030    }
9031
9032    /* Reset pending frame Drop list and requests list */
9033    mPendingFrameDropList.clear();
9034
9035    flushMap.clear();
9036    mPendingBuffersMap.num_buffers = 0;
9037    mPendingBuffersMap.mPendingBufferList.clear();
9038    mPendingReprocessResultList.clear();
9039    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);
9040
9041    return rc;
9042}
9043
9044bool QCamera3HardwareInterface::isOnEncoder(
9045        const cam_dimension_t max_viewfinder_size,
9046        uint32_t width, uint32_t height)
9047{
9048    return (width > (uint32_t)max_viewfinder_size.width ||
9049            height > (uint32_t)max_viewfinder_size.height);
9050}
9051
9052}; //end namespace qcamera
9053