QCamera3HWI.cpp revision ecc5ece9eff7c7855faf676881d0aba59c1bd1db
/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <sync/sync.h>
46#include <gralloc_priv.h>
47#include "util/QCameraFlash.h"
48#include "QCamera3HWI.h"
49#include "QCamera3Mem.h"
50#include "QCamera3Channel.h"
51#include "QCamera3PostProc.h"
52#include "QCamera3VendorTags.h"
53
54using namespace android;
55
56namespace qcamera {
57
// Shorthand: fetch the mapped buffer pointer at INDEX from a QCamera3Mem object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline/result bookkeeping constants reported to or used by the framework.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum pixel values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Upper bound on stream sizes for which EIS is considered supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream-count limits.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds

// Element count of a fixed-size mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features advertised for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )

#define TIMEOUT_NEVER -1

// Per-sensor capability tables and cached static metadata, populated elsewhere
// in this file. NOTE(review): gCamLock presumably serializes open/close access
// to these globals — confirm against the rest of the file.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
volatile uint32_t gCamHal3LogLevel = 1;
102
// Property-string -> backend CDS mode lookup table (see the persist.camera
// property parsing elsewhere in this file).
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Android framework effect mode -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android framework AWB mode -> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
136
// Android framework scene mode -> backend scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android framework AF mode -> backend focus mode. AF_MODE_OFF intentionally
// appears twice (OFF and FIXED both surface as OFF to the framework); when
// mapping backend->framework the first match wins.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android chromatic-aberration correction mode -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding mode -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE mode -> backend flash mode (both OFF and plain ON mean "flash
// off" to the backend; flash firing is implied by the AUTO/ALWAYS variants).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
199
// Android flash mode -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect mode -> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus-distance calibration quality -> backend calibration enum.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens state -> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Advertised JPEG thumbnail sizes as flattened (width, height) pairs;
// (0, 0) means "no thumbnail", as required by the HAL3 spec.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Android sensor test-pattern mode -> backend test-pattern mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
251
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested video fps -> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
289
/* camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
 * register_stream_buffers and get_metadata_vendor_tag_ops are deliberately
 * NULL: both are deprecated for the device API version this HAL reports. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
301
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks (stored, not owned)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Fill in the camera3_device_t handed back to the framework on open().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // Side effect on the shared capability table for this camera id; the
    // table must already be populated before this constructor runs.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;
    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR props default to "1" (enabled) for both preview and video.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);
}
393
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down in strict
 *              order: stop all channels, send a final "unconfigure" to the
 *              backend, delete channels, close the camera, then free local
 *              bookkeeping. Aborts the process if the framework still holds
 *              buffers at the end.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    // Snapshot now; consulted again at the very end to decide whether to
    // abort because the framework never returned all buffers.
    bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    // Metadata channel is stopped last among the streams.
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    }

    // Second pass: everything is stopped, now it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            clear_metadata_buffer(mParameters);

            // Check if there is still pending buffer not yet returned.
            if (hasPendingBuffers) {
                uint8_t restart = TRUE;
                ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DAEMON_RESTART,
                        restart);
            }

            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);

            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
        mChannelHandle = 0;
    }

    if (mCameraOpened)
        closeCamera();

    // Drop local bookkeeping; erasePendingRequest also frees per-request
    // heap allocations (input buffers, settings).
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);

    // Deliberate crash: leaking framework-owned buffers is unrecoverable.
    if (hasPendingBuffers) {
        ALOGE("%s: Not all buffers are returned. Aborting...", __func__);
        abort();
    }
    CDBG("%s: X", __func__);
}
536
537/*===========================================================================
538 * FUNCTION   : erasePendingRequest
539 *
540 * DESCRIPTION: function to erase a desired pending request after freeing any
541 *              allocated memory
542 *
543 * PARAMETERS :
544 *   @i       : iterator pointing to pending request to be erased
545 *
546 * RETURN     : iterator pointing to the next request
547 *==========================================================================*/
548QCamera3HardwareInterface::pendingRequestIterator
549        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
550{
551    if (i->input_buffer != NULL) {
552        free(i->input_buffer);
553        i->input_buffer = NULL;
554    }
555    if (i->settings != NULL)
556        free_camera_metadata((camera_metadata_t*)i->settings);
557    return mPendingRequestsList.erase(i);
558}
559
/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr (the QCamera3HardwareInterface instance
 *                    passed at registration time)
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
                                          mm_camera_event_t *evt,
                                          void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                ALOGE("%s: Fatal, camera daemon died", __func__);
                //close the camera backend
                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
                        && obj->mCameraHandle->ops) {
                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
                } else {
                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
                            __func__);
                }
                // Report an unrecoverable device error so the framework tears
                // this camera device down.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = 0;
                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                CDBG("%s: HAL got request pull from Daemon", __func__);
                // Wake up any thread blocked waiting to submit a request.
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
                        evt->server_event_type);
                break;
        }
    } else {
        ALOGE("%s: NULL user_data/evt", __func__);
    }
}
615
616/*===========================================================================
617 * FUNCTION   : openCamera
618 *
619 * DESCRIPTION: open camera
620 *
621 * PARAMETERS :
622 *   @hw_device  : double ptr for camera device struct
623 *
624 * RETURN     : int32_t type of status
625 *              NO_ERROR  -- success
626 *              none-zero failure code
627 *==========================================================================*/
628int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
629{
630    int rc = 0;
631    if (mCameraOpened) {
632        *hw_device = NULL;
633        return PERMISSION_DENIED;
634    }
635    m_perfLock.lock_acq();
636    rc = openCamera();
637    if (rc == 0) {
638        *hw_device = &mCameraDevice.common;
639    } else
640        *hw_device = NULL;
641
642    m_perfLock.lock_rel();
643    return rc;
644}
645
646/*===========================================================================
647 * FUNCTION   : openCamera
648 *
649 * DESCRIPTION: open camera
650 *
651 * PARAMETERS : none
652 *
653 * RETURN     : int32_t type of status
654 *              NO_ERROR  -- success
655 *              none-zero failure code
656 *==========================================================================*/
657int QCamera3HardwareInterface::openCamera()
658{
659    int rc = 0;
660
661    ATRACE_CALL();
662    if (mCameraHandle) {
663        ALOGE("Failure: Camera already opened");
664        return ALREADY_EXISTS;
665    }
666
667    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
668    if (rc < 0) {
669        ALOGE("%s: Failed to reserve flash for camera id: %d",
670                __func__,
671                mCameraId);
672        return UNKNOWN_ERROR;
673    }
674
675    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
676    if (rc) {
677        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
678        return rc;
679    }
680
681    mCameraOpened = true;
682
683    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
684            camEvtHandle, (void *)this);
685
686    if (rc < 0) {
687        ALOGE("%s: Error, failed to register event callback", __func__);
688        /* Not closing camera here since it is already handled in destructor */
689        return FAILED_TRANSACTION;
690    }
691    mFirstConfiguration = true;
692    return NO_ERROR;
693}
694
695/*===========================================================================
696 * FUNCTION   : closeCamera
697 *
698 * DESCRIPTION: close camera
699 *
700 * PARAMETERS : none
701 *
702 * RETURN     : int32_t type of status
703 *              NO_ERROR  -- success
704 *              none-zero failure code
705 *==========================================================================*/
706int QCamera3HardwareInterface::closeCamera()
707{
708    ATRACE_CALL();
709    int rc = NO_ERROR;
710
711    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
712    mCameraHandle = NULL;
713    mCameraOpened = false;
714
715    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
716        CDBG("%s: Failed to release flash for camera id: %d",
717                __func__,
718                mCameraId);
719    }
720
721    return rc;
722}
723
724/*===========================================================================
725 * FUNCTION   : initialize
726 *
727 * DESCRIPTION: Initialize frameworks callback functions
728 *
729 * PARAMETERS :
730 *   @callback_ops : callback function to frameworks
731 *
732 * RETURN     :
733 *
734 *==========================================================================*/
735int QCamera3HardwareInterface::initialize(
736        const struct camera3_callback_ops *callback_ops)
737{
738    ATRACE_CALL();
739    int rc;
740
741    pthread_mutex_lock(&mMutex);
742
743    rc = initParameters();
744    if (rc < 0) {
745        ALOGE("%s: initParamters failed %d", __func__, rc);
746       goto err1;
747    }
748    mCallbackOps = callback_ops;
749
750    mChannelHandle = mCameraHandle->ops->add_channel(
751            mCameraHandle->camera_handle, NULL, NULL, this);
752    if (mChannelHandle == 0) {
753        ALOGE("%s: add_channel failed", __func__);
754        rc = -ENOMEM;
755        pthread_mutex_unlock(&mMutex);
756        return rc;
757    }
758
759    pthread_mutex_unlock(&mMutex);
760    mCameraInitialized = true;
761    return 0;
762
763err1:
764    pthread_mutex_unlock(&mMutex);
765    return rc;
766}
767
768/*===========================================================================
769 * FUNCTION   : validateStreamDimensions
770 *
771 * DESCRIPTION: Check if the configuration requested are those advertised
772 *
773 * PARAMETERS :
774 *   @stream_list : streams to be configured
775 *
776 * RETURN     :
777 *
778 *==========================================================================*/
779int QCamera3HardwareInterface::validateStreamDimensions(
780        camera3_stream_configuration_t *streamList)
781{
782    int rc = NO_ERROR;
783    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
784    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
785    size_t count = 0;
786
787    camera3_stream_t *inputStream = NULL;
788    /*
789    * Loop through all streams to find input stream if it exists*
790    */
791    for (size_t i = 0; i< streamList->num_streams; i++) {
792        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
793            if (inputStream != NULL) {
794                ALOGE("%s: Error, Multiple input streams requested");
795                return -EINVAL;
796            }
797            inputStream = streamList->streams[i];
798        }
799    }
800    /*
801    * Loop through all streams requested in configuration
802    * Check if unsupported sizes have been requested on any of them
803    */
804    for (size_t j = 0; j < streamList->num_streams; j++) {
805        bool sizeFound = false;
806        size_t jpeg_sizes_cnt = 0;
807        camera3_stream_t *newStream = streamList->streams[j];
808
809        uint32_t rotatedHeight = newStream->height;
810        uint32_t rotatedWidth = newStream->width;
811        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
812                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
813            rotatedHeight = newStream->width;
814            rotatedWidth = newStream->height;
815        }
816
817        /*
818        * Sizes are different for each type of stream format check against
819        * appropriate table.
820        */
821        switch (newStream->format) {
822        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
823        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
824        case HAL_PIXEL_FORMAT_RAW10:
825            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
826            for (size_t i = 0; i < count; i++) {
827                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
828                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
829                    sizeFound = true;
830                    break;
831                }
832            }
833            break;
834        case HAL_PIXEL_FORMAT_BLOB:
835            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
836            /* Generate JPEG sizes table */
837            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
838                    count,
839                    MAX_SIZES_CNT,
840                    available_processed_sizes);
841            jpeg_sizes_cnt = filterJpegSizes(
842                    available_jpeg_sizes,
843                    available_processed_sizes,
844                    count * 2,
845                    MAX_SIZES_CNT * 2,
846                    gCamCapability[mCameraId]->active_array_size,
847                    gCamCapability[mCameraId]->max_downscale_factor);
848
849            /* Verify set size against generated sizes table */
850            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
851                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
852                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
853                    sizeFound = true;
854                    break;
855                }
856            }
857            break;
858        case HAL_PIXEL_FORMAT_YCbCr_420_888:
859        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
860        default:
861            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
862                    || newStream->stream_type == CAMERA3_STREAM_INPUT
863                    || IS_USAGE_ZSL(newStream->usage)) {
864                if (((int32_t)rotatedWidth ==
865                                gCamCapability[mCameraId]->active_array_size.width) &&
866                                ((int32_t)rotatedHeight ==
867                                gCamCapability[mCameraId]->active_array_size.height)) {
868                    sizeFound = true;
869                    break;
870                }
871                /* We could potentially break here to enforce ZSL stream
872                 * set from frameworks always is full active array size
873                 * but it is not clear from the spc if framework will always
874                 * follow that, also we have logic to override to full array
875                 * size, so keeping the logic lenient at the moment
876                 */
877            }
878            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
879                    MAX_SIZES_CNT);
880            for (size_t i = 0; i < count; i++) {
881                if (((int32_t)rotatedWidth ==
882                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
883                            ((int32_t)rotatedHeight ==
884                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
885                    sizeFound = true;
886                    break;
887                }
888            }
889            break;
890        } /* End of switch(newStream->format) */
891
892        /* We error out even if a single stream has unsupported size set */
893        if (!sizeFound) {
894            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
895                  "type:%d", __func__, rotatedWidth, rotatedHeight,
896                  newStream->format);
897            ALOGE("%s: Active array size is  %d x %d", __func__,
898                    gCamCapability[mCameraId]->active_array_size.width,
899                    gCamCapability[mCameraId]->active_array_size.height);
900            rc = -EINVAL;
901            break;
902        }
903    } /* End of for each stream */
904    return rc;
905}
906
907/*==============================================================================
908 * FUNCTION   : isSupportChannelNeeded
909 *
910 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
911 *
912 * PARAMETERS :
913 *   @stream_list : streams to be configured
914 *   @stream_config_info : the config info for streams to be configured
915 *
 * RETURN     : Boolean true/false decision
917 *
918 *==========================================================================*/
919bool QCamera3HardwareInterface::isSupportChannelNeeded(
920        camera3_stream_configuration_t *streamList,
921        cam_stream_size_info_t stream_config_info)
922{
923    uint32_t i;
924    bool pprocRequested = false;
925    /* Check for conditions where PProc pipeline does not have any streams*/
926    for (i = 0; i < stream_config_info.num_streams; i++) {
927        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
928                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
929            pprocRequested = true;
930            break;
931        }
932    }
933
934    if (pprocRequested == false )
935        return true;
936
937    /* Dummy stream needed if only raw or jpeg streams present */
938    for (i = 0; i < streamList->num_streams; i++) {
939        switch(streamList->streams[i]->format) {
940            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
941            case HAL_PIXEL_FORMAT_RAW10:
942            case HAL_PIXEL_FORMAT_RAW16:
943            case HAL_PIXEL_FORMAT_BLOB:
944                break;
945            default:
946                return false;
947        }
948    }
949    return true;
950}
951
952/*==============================================================================
953 * FUNCTION   : getSensorOutputSize
954 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
956 *
957 * PARAMETERS :
958 *   @sensor_dim : sensor output dimension (output)
959 *
960 * RETURN     : int32_t type of status
961 *              NO_ERROR  -- success
962 *              none-zero failure code
963 *
964 *==========================================================================*/
965int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
966{
967    int32_t rc = NO_ERROR;
968
969    cam_dimension_t max_dim = {0, 0};
970    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
971        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
972            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
973        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
974            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
975    }
976
977    clear_metadata_buffer(mParameters);
978
979    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
980            max_dim);
981    if (rc != NO_ERROR) {
982        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
983        return rc;
984    }
985
986    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
987    if (rc != NO_ERROR) {
988        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
989        return rc;
990    }
991
992    clear_metadata_buffer(mParameters);
993    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
994
995    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
996            mParameters);
997    if (rc != NO_ERROR) {
998        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
999        return rc;
1000    }
1001
1002    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1003    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
1004
1005    return rc;
1006}
1007
1008/*==============================================================================
1009 * FUNCTION   : enablePowerHint
1010 *
1011 * DESCRIPTION: enable single powerhint for preview and different video modes.
1012 *
1013 * PARAMETERS :
1014 *
1015 * RETURN     : NULL
1016 *
1017 *==========================================================================*/
1018void QCamera3HardwareInterface::enablePowerHint()
1019{
1020    if (!mPowerHintEnabled) {
1021        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1022        mPowerHintEnabled = true;
1023    }
1024}
1025
1026/*==============================================================================
1027 * FUNCTION   : disablePowerHint
1028 *
1029 * DESCRIPTION: disable current powerhint.
1030 *
1031 * PARAMETERS :
1032 *
1033 * RETURN     : NULL
1034 *
1035 *==========================================================================*/
1036void QCamera3HardwareInterface::disablePowerHint()
1037{
1038    if (mPowerHintEnabled) {
1039        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1040        mPowerHintEnabled = false;
1041    }
1042}
1043
1044/*===========================================================================
1045 * FUNCTION   : configureStreams
1046 *
1047 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1048 *              and output streams.
1049 *
1050 * PARAMETERS :
1051 *   @stream_list : streams to be configured
1052 *
1053 * RETURN     :
1054 *
1055 *==========================================================================*/
1056int QCamera3HardwareInterface::configureStreams(
1057        camera3_stream_configuration_t *streamList)
1058{
1059    ATRACE_CALL();
1060    int rc = 0;
1061
1062    // Acquire perfLock before configure streams
1063    m_perfLock.lock_acq();
1064    rc = configureStreamsPerfLocked(streamList);
1065    m_perfLock.lock_rel();
1066
1067    return rc;
1068}
1069
1070/*===========================================================================
1071 * FUNCTION   : configureStreamsPerfLocked
1072 *
1073 * DESCRIPTION: configureStreams while perfLock is held.
1074 *
1075 * PARAMETERS :
1076 *   @stream_list : streams to be configured
1077 *
1078 * RETURN     : int32_t type of status
1079 *              NO_ERROR  -- success
1080 *              none-zero failure code
1081 *==========================================================================*/
1082int QCamera3HardwareInterface::configureStreamsPerfLocked(
1083        camera3_stream_configuration_t *streamList)
1084{
1085    ATRACE_CALL();
1086    int rc = 0;
1087
1088    // Sanity check stream_list
1089    if (streamList == NULL) {
1090        ALOGE("%s: NULL stream configuration", __func__);
1091        return BAD_VALUE;
1092    }
1093    if (streamList->streams == NULL) {
1094        ALOGE("%s: NULL stream list", __func__);
1095        return BAD_VALUE;
1096    }
1097
1098    if (streamList->num_streams < 1) {
1099        ALOGE("%s: Bad number of streams requested: %d", __func__,
1100                streamList->num_streams);
1101        return BAD_VALUE;
1102    }
1103
1104    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1105        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1106                MAX_NUM_STREAMS, streamList->num_streams);
1107        return BAD_VALUE;
1108    }
1109
1110    mOpMode = streamList->operation_mode;
1111    CDBG("%s: mOpMode: %d", __func__, mOpMode);
1112
1113    /* first invalidate all the steams in the mStreamList
1114     * if they appear again, they will be validated */
1115    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1116            it != mStreamInfo.end(); it++) {
1117        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1118        if (channel) {
1119          channel->stop();
1120        }
1121        (*it)->status = INVALID;
1122    }
1123
1124    if (mRawDumpChannel) {
1125        mRawDumpChannel->stop();
1126        delete mRawDumpChannel;
1127        mRawDumpChannel = NULL;
1128    }
1129
1130    if (mSupportChannel)
1131        mSupportChannel->stop();
1132
1133    if (mAnalysisChannel) {
1134        mAnalysisChannel->stop();
1135    }
1136    if (mMetadataChannel) {
1137        /* If content of mStreamInfo is not 0, there is metadata stream */
1138        mMetadataChannel->stop();
1139    }
1140    if (mChannelHandle) {
1141        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1142                mChannelHandle);
1143        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1144    }
1145
1146    pthread_mutex_lock(&mMutex);
1147
1148    /* Check whether we have video stream */
1149    m_bIs4KVideo = false;
1150    m_bIsVideo = false;
1151    m_bEisSupportedSize = false;
1152    m_bTnrEnabled = false;
1153    bool isZsl = false;
1154    uint32_t videoWidth = 0U;
1155    uint32_t videoHeight = 0U;
1156    size_t rawStreamCnt = 0;
1157    size_t stallStreamCnt = 0;
1158    size_t processedStreamCnt = 0;
1159    // Number of streams on ISP encoder path
1160    size_t numStreamsOnEncoder = 0;
1161    size_t numYuv888OnEncoder = 0;
1162    bool bYuv888OverrideJpeg = false;
1163    cam_dimension_t largeYuv888Size = {0, 0};
1164    cam_dimension_t maxViewfinderSize = {0, 0};
1165    bool bJpegExceeds4K = false;
1166    bool bJpegOnEncoder = false;
1167    bool bUseCommonFeatureMask = false;
1168    uint32_t commonFeatureMask = 0;
1169    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1170    camera3_stream_t *inputStream = NULL;
1171    bool isJpeg = false;
1172    cam_dimension_t jpegSize = {0, 0};
1173
1174    /*EIS configuration*/
1175    bool eisSupported = false;
1176    bool oisSupported = false;
1177    int32_t margin_index = -1;
1178    uint8_t eis_prop_set;
1179    uint32_t maxEisWidth = 0;
1180    uint32_t maxEisHeight = 0;
1181    int32_t hal_version = CAM_HAL_V3;
1182
1183    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1184
1185    size_t count = IS_TYPE_MAX;
1186    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1187    for (size_t i = 0; i < count; i++) {
1188        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1189            eisSupported = true;
1190            margin_index = (int32_t)i;
1191            break;
1192        }
1193    }
1194
1195    count = CAM_OPT_STAB_MAX;
1196    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1197    for (size_t i = 0; i < count; i++) {
1198        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1199            oisSupported = true;
1200            break;
1201        }
1202    }
1203
1204    if (eisSupported) {
1205        maxEisWidth = MAX_EIS_WIDTH;
1206        maxEisHeight = MAX_EIS_HEIGHT;
1207    }
1208
1209    /* EIS setprop control */
1210    char eis_prop[PROPERTY_VALUE_MAX];
1211    memset(eis_prop, 0, sizeof(eis_prop));
1212    property_get("persist.camera.eis.enable", eis_prop, "0");
1213    eis_prop_set = (uint8_t)atoi(eis_prop);
1214
1215    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1216            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1217
1218    /* stream configurations */
1219    for (size_t i = 0; i < streamList->num_streams; i++) {
1220        camera3_stream_t *newStream = streamList->streams[i];
1221        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1222                "height = %d, rotation = %d, usage = 0x%x",
1223                __func__, i, newStream->stream_type, newStream->format,
1224                newStream->width, newStream->height, newStream->rotation,
1225                newStream->usage);
1226        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1227                newStream->stream_type == CAMERA3_STREAM_INPUT){
1228            isZsl = true;
1229        }
1230        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1231            inputStream = newStream;
1232        }
1233
1234        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1235            isJpeg = true;
1236            jpegSize.width = newStream->width;
1237            jpegSize.height = newStream->height;
1238            if (newStream->width > VIDEO_4K_WIDTH ||
1239                    newStream->height > VIDEO_4K_HEIGHT)
1240                bJpegExceeds4K = true;
1241        }
1242
1243        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1244                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1245            m_bIsVideo = true;
1246            videoWidth = newStream->width;
1247            videoHeight = newStream->height;
1248            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1249                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1250                m_bIs4KVideo = true;
1251            }
1252            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1253                                  (newStream->height <= maxEisHeight);
1254        }
1255        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1256                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1257            switch (newStream->format) {
1258            case HAL_PIXEL_FORMAT_BLOB:
1259                stallStreamCnt++;
1260                if (isOnEncoder(maxViewfinderSize, newStream->width,
1261                        newStream->height)) {
1262                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1263                    numStreamsOnEncoder++;
1264                    bJpegOnEncoder = true;
1265                }
1266                break;
1267            case HAL_PIXEL_FORMAT_RAW10:
1268            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1269            case HAL_PIXEL_FORMAT_RAW16:
1270                rawStreamCnt++;
1271                break;
1272            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1273                processedStreamCnt++;
1274                if (isOnEncoder(maxViewfinderSize, newStream->width,
1275                        newStream->height)) {
1276                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1277                            IS_USAGE_ZSL(newStream->usage)) {
1278                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1279                    } else {
1280                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1281                    }
1282                    numStreamsOnEncoder++;
1283                }
1284                break;
1285            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1286                processedStreamCnt++;
1287                if (isOnEncoder(maxViewfinderSize, newStream->width,
1288                        newStream->height)) {
1289                    // If Yuv888 size is not greater than 4K, set feature mask
1290                    // to SUPERSET so that it support concurrent request on
1291                    // YUV and JPEG.
1292                    if (newStream->width <= VIDEO_4K_WIDTH &&
1293                            newStream->height <= VIDEO_4K_HEIGHT) {
1294                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1295                    } else {
1296                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1297                    }
1298                    numStreamsOnEncoder++;
1299                    numYuv888OnEncoder++;
1300                    largeYuv888Size.width = newStream->width;
1301                    largeYuv888Size.height = newStream->height;
1302                }
1303                break;
1304            default:
1305                processedStreamCnt++;
1306                if (isOnEncoder(maxViewfinderSize, newStream->width,
1307                        newStream->height)) {
1308                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1309                    numStreamsOnEncoder++;
1310                }
1311                break;
1312            }
1313
1314        }
1315    }
1316
1317    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1318        !m_bIsVideo) {
1319        m_bEisEnable = false;
1320    }
1321
1322    /* Logic to enable/disable TNR based on specific config size/etc.*/
1323    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1324            ((videoWidth == 1920 && videoHeight == 1080) ||
1325            (videoWidth == 1280 && videoHeight == 720)) &&
1326            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1327        m_bTnrEnabled = true;
1328
1329    /* Check if num_streams is sane */
1330    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1331            rawStreamCnt > MAX_RAW_STREAMS ||
1332            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1333        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1334                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1335        pthread_mutex_unlock(&mMutex);
1336        return -EINVAL;
1337    }
1338    /* Check whether we have zsl stream or 4k video case */
1339    if (isZsl && m_bIsVideo) {
1340        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1341        pthread_mutex_unlock(&mMutex);
1342        return -EINVAL;
1343    }
1344    /* Check if stream sizes are sane */
1345    if (numStreamsOnEncoder > 2) {
1346        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1347                __func__);
1348        pthread_mutex_unlock(&mMutex);
1349        return -EINVAL;
1350    } else if (1 < numStreamsOnEncoder){
1351        bUseCommonFeatureMask = true;
1352        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1353                __func__);
1354    }
1355
1356    /* Check if BLOB size is greater than 4k in 4k recording case */
1357    if (m_bIs4KVideo && bJpegExceeds4K) {
1358        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1359                __func__);
1360        pthread_mutex_unlock(&mMutex);
1361        return -EINVAL;
1362    }
1363
1364    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1365    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1366    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1367    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1368    // configurations:
1369    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1370    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1371    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1372    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1373        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1374                __func__);
1375        pthread_mutex_unlock(&mMutex);
1376        return -EINVAL;
1377    }
1378
1379    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1380    // the YUV stream's size is greater or equal to the JPEG size, set common
1381    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1382    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1383            jpegSize.width, jpegSize.height) &&
1384            largeYuv888Size.width > jpegSize.width &&
1385            largeYuv888Size.height > jpegSize.height) {
1386        bYuv888OverrideJpeg = true;
1387    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1388        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1389    }
1390
1391    rc = validateStreamDimensions(streamList);
1392    if (rc == NO_ERROR) {
1393        rc = validateStreamRotations(streamList);
1394    }
1395    if (rc != NO_ERROR) {
1396        ALOGE("%s: Invalid stream configuration requested!", __func__);
1397        pthread_mutex_unlock(&mMutex);
1398        return rc;
1399    }
1400
1401    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1402    camera3_stream_t *jpegStream = NULL;
1403    for (size_t i = 0; i < streamList->num_streams; i++) {
1404        camera3_stream_t *newStream = streamList->streams[i];
1405        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1406                "stream size : %d x %d, stream rotation = %d",
1407                __func__, newStream->stream_type, newStream->format,
1408                newStream->width, newStream->height, newStream->rotation);
1409        //if the stream is in the mStreamList validate it
1410        bool stream_exists = false;
1411        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1412                it != mStreamInfo.end(); it++) {
1413            if ((*it)->stream == newStream) {
1414                QCamera3ProcessingChannel *channel =
1415                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1416                stream_exists = true;
1417                if (channel)
1418                    delete channel;
1419                (*it)->status = VALID;
1420                (*it)->stream->priv = NULL;
1421                (*it)->channel = NULL;
1422            }
1423        }
1424        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1425            //new stream
1426            stream_info_t* stream_info;
1427            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1428            if (!stream_info) {
1429               ALOGE("%s: Could not allocate stream info", __func__);
1430               rc = -ENOMEM;
1431               pthread_mutex_unlock(&mMutex);
1432               return rc;
1433            }
1434            stream_info->stream = newStream;
1435            stream_info->status = VALID;
1436            stream_info->channel = NULL;
1437            mStreamInfo.push_back(stream_info);
1438        }
1439        /* Covers Opaque ZSL and API1 F/W ZSL */
1440        if (IS_USAGE_ZSL(newStream->usage)
1441                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1442            if (zslStream != NULL) {
1443                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1444                pthread_mutex_unlock(&mMutex);
1445                return BAD_VALUE;
1446            }
1447            zslStream = newStream;
1448        }
1449        /* Covers YUV reprocess */
1450        if (inputStream != NULL) {
1451            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1452                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1453                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1454                    && inputStream->width == newStream->width
1455                    && inputStream->height == newStream->height) {
1456                if (zslStream != NULL) {
1457                    /* This scenario indicates multiple YUV streams with same size
1458                     * as input stream have been requested, since zsl stream handle
1459                     * is solely use for the purpose of overriding the size of streams
1460                     * which share h/w streams we will just make a guess here as to
1461                     * which of the stream is a ZSL stream, this will be refactored
1462                     * once we make generic logic for streams sharing encoder output
1463                     */
1464                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1465                }
1466                zslStream = newStream;
1467            }
1468        }
1469        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1470            jpegStream = newStream;
1471        }
1472    }
1473
1474    /* If a zsl stream is set, we know that we have configured at least one input or
1475       bidirectional stream */
1476    if (NULL != zslStream) {
1477        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1478        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1479        mInputStreamInfo.format = zslStream->format;
1480        mInputStreamInfo.usage = zslStream->usage;
1481        CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1482                __func__, mInputStreamInfo.dim.width,
1483                mInputStreamInfo.dim.height,
1484                mInputStreamInfo.format, mInputStreamInfo.usage);
1485    }
1486
1487    cleanAndSortStreamInfo();
1488    if (mMetadataChannel) {
1489        delete mMetadataChannel;
1490        mMetadataChannel = NULL;
1491    }
1492    if (mSupportChannel) {
1493        delete mSupportChannel;
1494        mSupportChannel = NULL;
1495    }
1496
1497    if (mAnalysisChannel) {
1498        delete mAnalysisChannel;
1499        mAnalysisChannel = NULL;
1500    }
1501
1502    if (mDummyBatchChannel) {
1503        delete mDummyBatchChannel;
1504        mDummyBatchChannel = NULL;
1505    }
1506
1507    //Create metadata channel and initialize it
1508    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1509                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1510                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1511    if (mMetadataChannel == NULL) {
1512        ALOGE("%s: failed to allocate metadata channel", __func__);
1513        rc = -ENOMEM;
1514        pthread_mutex_unlock(&mMutex);
1515        return rc;
1516    }
1517    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1518    if (rc < 0) {
1519        ALOGE("%s: metadata channel initialization failed", __func__);
1520        delete mMetadataChannel;
1521        mMetadataChannel = NULL;
1522        pthread_mutex_unlock(&mMutex);
1523        return rc;
1524    }
1525
1526    // Create analysis stream all the time, even when h/w support is not available
1527    {
1528        mAnalysisChannel = new QCamera3SupportChannel(
1529                mCameraHandle->camera_handle,
1530                mChannelHandle,
1531                mCameraHandle->ops,
1532                &gCamCapability[mCameraId]->padding_info,
1533                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1534                CAM_STREAM_TYPE_ANALYSIS,
1535                &gCamCapability[mCameraId]->analysis_recommended_res,
1536                gCamCapability[mCameraId]->analysis_recommended_format,
1537                this,
1538                0); // force buffer count to 0
1539        if (!mAnalysisChannel) {
1540            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1541            pthread_mutex_unlock(&mMutex);
1542            return -ENOMEM;
1543        }
1544    }
1545
1546    bool isRawStreamRequested = false;
1547    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1548    /* Allocate channel objects for the requested streams */
1549    for (size_t i = 0; i < streamList->num_streams; i++) {
1550        camera3_stream_t *newStream = streamList->streams[i];
1551        uint32_t stream_usage = newStream->usage;
1552        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1553        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1554        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1555                || IS_USAGE_ZSL(newStream->usage)) &&
1556            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1557            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1558            if (bUseCommonFeatureMask) {
1559                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1560                        commonFeatureMask;
1561            } else {
1562                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1563                        CAM_QCOM_FEATURE_NONE;
1564            }
1565
1566        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1567                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1568        } else {
1569            //for non zsl streams find out the format
1570            switch (newStream->format) {
1571            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1572              {
1573                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1574                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1575
1576                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1577
1578                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1579                     if (m_bTnrEnabled && m_bTnrVideo) {
1580                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1581                             CAM_QCOM_FEATURE_CPP_TNR;
1582                     }
1583
1584                 } else {
1585
1586                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1587                     if (m_bTnrEnabled && m_bTnrPreview) {
1588                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1589                             CAM_QCOM_FEATURE_CPP_TNR;
1590                     }
1591                 }
1592
1593                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1594                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1595                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1596                             newStream->height;
1597                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1598                             newStream->width;
1599                 }
1600              }
1601              break;
1602           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1603              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1604              if (isOnEncoder(maxViewfinderSize, newStream->width,
1605                      newStream->height)) {
1606                  if (bUseCommonFeatureMask)
1607                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1608                              commonFeatureMask;
1609                  else
1610                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1611                              CAM_QCOM_FEATURE_NONE;
1612              } else {
1613                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1614                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1615              }
1616              break;
1617           case HAL_PIXEL_FORMAT_BLOB:
1618              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1619              if (m_bIs4KVideo && !isZsl) {
1620                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1621                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1622              } else {
1623                  if (bUseCommonFeatureMask &&
1624                          isOnEncoder(maxViewfinderSize, newStream->width,
1625                                  newStream->height)) {
1626                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1627                  } else {
1628                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1629                  }
1630              }
1631              if (isZsl) {
1632                  if (zslStream) {
1633                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1634                              (int32_t)zslStream->width;
1635                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1636                              (int32_t)zslStream->height;
1637                  } else {
1638                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1639                      pthread_mutex_unlock(&mMutex);
1640                      return -EINVAL;
1641                  }
1642              } else if (m_bIs4KVideo) {
1643                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1644                          (int32_t)videoWidth;
1645                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1646                          (int32_t)videoHeight;
1647              } else if (bYuv888OverrideJpeg) {
1648                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1649                          (int32_t)largeYuv888Size.width;
1650                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1651                          (int32_t)largeYuv888Size.height;
1652              }
1653              break;
1654           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1655           case HAL_PIXEL_FORMAT_RAW16:
1656           case HAL_PIXEL_FORMAT_RAW10:
1657              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1658              isRawStreamRequested = true;
1659              break;
1660           default:
1661              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1662              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1663              break;
1664            }
1665
1666        }
1667
1668        if (newStream->priv == NULL) {
1669            //New stream, construct channel
1670            switch (newStream->stream_type) {
1671            case CAMERA3_STREAM_INPUT:
1672                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1673                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1674                break;
1675            case CAMERA3_STREAM_BIDIRECTIONAL:
1676                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1677                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1678                break;
1679            case CAMERA3_STREAM_OUTPUT:
1680                /* For video encoding stream, set read/write rarely
1681                 * flag so that they may be set to un-cached */
1682                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1683                    newStream->usage |=
1684                         (GRALLOC_USAGE_SW_READ_RARELY |
1685                         GRALLOC_USAGE_SW_WRITE_RARELY |
1686                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1687                else if (IS_USAGE_ZSL(newStream->usage))
1688                    CDBG("%s: ZSL usage flag skipping", __func__);
1689                else if (newStream == zslStream
1690                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1691                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1692                } else
1693                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1694                break;
1695            default:
1696                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1697                break;
1698            }
1699
1700            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1701                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1702                QCamera3ProcessingChannel *channel = NULL;
1703                switch (newStream->format) {
1704                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1705                    if ((newStream->usage &
1706                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1707                            (streamList->operation_mode ==
1708                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1709                    ) {
1710                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1711                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1712                                &gCamCapability[mCameraId]->padding_info,
1713                                this,
1714                                newStream,
1715                                (cam_stream_type_t)
1716                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1717                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1718                                mMetadataChannel,
1719                                0); //heap buffers are not required for HFR video channel
1720                        if (channel == NULL) {
1721                            ALOGE("%s: allocation of channel failed", __func__);
1722                            pthread_mutex_unlock(&mMutex);
1723                            return -ENOMEM;
1724                        }
1725                        //channel->getNumBuffers() will return 0 here so use
1726                        //MAX_INFLIGH_HFR_REQUESTS
1727                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1728                        newStream->priv = channel;
1729                        ALOGI("%s: num video buffers in HFR mode: %d",
1730                                __func__, MAX_INFLIGHT_HFR_REQUESTS);
1731                    } else {
1732                        /* Copy stream contents in HFR preview only case to create
1733                         * dummy batch channel so that sensor streaming is in
1734                         * HFR mode */
1735                        if (!m_bIsVideo && (streamList->operation_mode ==
1736                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1737                            mDummyBatchStream = *newStream;
1738                        }
1739                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1740                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1741                                &gCamCapability[mCameraId]->padding_info,
1742                                this,
1743                                newStream,
1744                                (cam_stream_type_t)
1745                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1746                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1747                                mMetadataChannel,
1748                                MAX_INFLIGHT_REQUESTS);
1749                        if (channel == NULL) {
1750                            ALOGE("%s: allocation of channel failed", __func__);
1751                            pthread_mutex_unlock(&mMutex);
1752                            return -ENOMEM;
1753                        }
1754                        newStream->max_buffers = channel->getNumBuffers();
1755                        newStream->priv = channel;
1756                    }
1757                    break;
1758                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1759                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1760                            mChannelHandle,
1761                            mCameraHandle->ops, captureResultCb,
1762                            &gCamCapability[mCameraId]->padding_info,
1763                            this,
1764                            newStream,
1765                            (cam_stream_type_t)
1766                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1767                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1768                            mMetadataChannel);
1769                    if (channel == NULL) {
1770                        ALOGE("%s: allocation of YUV channel failed", __func__);
1771                        pthread_mutex_unlock(&mMutex);
1772                        return -ENOMEM;
1773                    }
1774                    newStream->max_buffers = channel->getNumBuffers();
1775                    newStream->priv = channel;
1776                    break;
1777                }
1778                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1779                case HAL_PIXEL_FORMAT_RAW16:
1780                case HAL_PIXEL_FORMAT_RAW10:
1781                    mRawChannel = new QCamera3RawChannel(
1782                            mCameraHandle->camera_handle, mChannelHandle,
1783                            mCameraHandle->ops, captureResultCb,
1784                            &gCamCapability[mCameraId]->padding_info,
1785                            this, newStream, CAM_QCOM_FEATURE_NONE,
1786                            mMetadataChannel,
1787                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1788                    if (mRawChannel == NULL) {
1789                        ALOGE("%s: allocation of raw channel failed", __func__);
1790                        pthread_mutex_unlock(&mMutex);
1791                        return -ENOMEM;
1792                    }
1793                    newStream->max_buffers = mRawChannel->getNumBuffers();
1794                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1795                    break;
1796                case HAL_PIXEL_FORMAT_BLOB:
1797                    // Max live snapshot inflight buffer is 1. This is to mitigate
1798                    // frame drop issues for video snapshot. The more buffers being
1799                    // allocated, the more frame drops there are.
1800                    mPictureChannel = new QCamera3PicChannel(
1801                            mCameraHandle->camera_handle, mChannelHandle,
1802                            mCameraHandle->ops, captureResultCb,
1803                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1804                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1805                            m_bIs4KVideo, isZsl, mMetadataChannel,
1806                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1807                    if (mPictureChannel == NULL) {
1808                        ALOGE("%s: allocation of channel failed", __func__);
1809                        pthread_mutex_unlock(&mMutex);
1810                        return -ENOMEM;
1811                    }
1812                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1813                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1814                    mPictureChannel->overrideYuvSize(
1815                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1816                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1817                    break;
1818
1819                default:
1820                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1821                    break;
1822                }
1823            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1824                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1825            } else {
1826                ALOGE("%s: Error, Unknown stream type", __func__);
1827                return -EINVAL;
1828            }
1829
1830            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1831                    it != mStreamInfo.end(); it++) {
1832                if ((*it)->stream == newStream) {
1833                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1834                    break;
1835                }
1836            }
1837        } else {
1838            // Channel already exists for this stream
1839            // Do nothing for now
1840        }
1841
1842    /* Do not add entries for input stream in metastream info
1843         * since there is no real stream associated with it
1844         */
1845        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1846            mStreamConfigInfo.num_streams++;
1847    }
1848
1849    //RAW DUMP channel
1850    if (mEnableRawDump && isRawStreamRequested == false){
1851        cam_dimension_t rawDumpSize;
1852        rawDumpSize = getMaxRawSize(mCameraId);
1853        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1854                                  mChannelHandle,
1855                                  mCameraHandle->ops,
1856                                  rawDumpSize,
1857                                  &gCamCapability[mCameraId]->padding_info,
1858                                  this, CAM_QCOM_FEATURE_NONE);
1859        if (!mRawDumpChannel) {
1860            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1861            pthread_mutex_unlock(&mMutex);
1862            return -ENOMEM;
1863        }
1864    }
1865
1866
1867    if (mAnalysisChannel) {
1868        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1869                gCamCapability[mCameraId]->analysis_recommended_res;
1870        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1871                CAM_STREAM_TYPE_ANALYSIS;
1872        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1873                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1874        mStreamConfigInfo.num_streams++;
1875    }
1876
1877    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1878        mSupportChannel = new QCamera3SupportChannel(
1879                mCameraHandle->camera_handle,
1880                mChannelHandle,
1881                mCameraHandle->ops,
1882                &gCamCapability[mCameraId]->padding_info,
1883                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1884                CAM_STREAM_TYPE_CALLBACK,
1885                &QCamera3SupportChannel::kDim,
1886                CAM_FORMAT_YUV_420_NV21,
1887                this);
1888        if (!mSupportChannel) {
1889            ALOGE("%s: dummy channel cannot be created", __func__);
1890            pthread_mutex_unlock(&mMutex);
1891            return -ENOMEM;
1892        }
1893    }
1894
1895    if (mSupportChannel) {
1896        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1897                QCamera3SupportChannel::kDim;
1898        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1899                CAM_STREAM_TYPE_CALLBACK;
1900        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1901                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1902        mStreamConfigInfo.num_streams++;
1903    }
1904
1905    if (mRawDumpChannel) {
1906        cam_dimension_t rawSize;
1907        rawSize = getMaxRawSize(mCameraId);
1908        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1909                rawSize;
1910        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1911                CAM_STREAM_TYPE_RAW;
1912        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1913                CAM_QCOM_FEATURE_NONE;
1914        mStreamConfigInfo.num_streams++;
1915    }
1916    /* In HFR mode, if video stream is not added, create a dummy channel so that
1917     * ISP can create a batch mode even for preview only case. This channel is
1918     * never 'start'ed (no stream-on), it is only 'initialized'  */
1919    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1920            !m_bIsVideo) {
1921        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1922                mChannelHandle,
1923                mCameraHandle->ops, captureResultCb,
1924                &gCamCapability[mCameraId]->padding_info,
1925                this,
1926                &mDummyBatchStream,
1927                CAM_STREAM_TYPE_VIDEO,
1928                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1929                mMetadataChannel);
1930        if (NULL == mDummyBatchChannel) {
1931            ALOGE("%s: creation of mDummyBatchChannel failed."
1932                    "Preview will use non-hfr sensor mode ", __func__);
1933        }
1934    }
1935    if (mDummyBatchChannel) {
1936        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1937                mDummyBatchStream.width;
1938        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1939                mDummyBatchStream.height;
1940        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1941                CAM_STREAM_TYPE_VIDEO;
1942        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1943                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1944        mStreamConfigInfo.num_streams++;
1945    }
1946
1947    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
1948    mStreamConfigInfo.buffer_info.max_buffers =
1949            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
1950
1951    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
1952    for (pendingRequestIterator i = mPendingRequestsList.begin();
1953            i != mPendingRequestsList.end();) {
1954        i = erasePendingRequest(i);
1955    }
1956    mPendingFrameDropList.clear();
1957    // Initialize/Reset the pending buffers list
1958    mPendingBuffersMap.num_buffers = 0;
1959    mPendingBuffersMap.mPendingBufferList.clear();
1960    mPendingReprocessResultList.clear();
1961
1962    mFirstRequest = true;
1963    mCurJpegMeta.clear();
1964    //Get min frame duration for this streams configuration
1965    deriveMinFrameDuration();
1966
1967    /* Turn on video hint only if video stream is configured */
1968
1969    pthread_mutex_unlock(&mMutex);
1970
1971    return rc;
1972}
1973
1974/*===========================================================================
1975 * FUNCTION   : validateCaptureRequest
1976 *
1977 * DESCRIPTION: validate a capture request from camera service
1978 *
1979 * PARAMETERS :
1980 *   @request : request from framework to process
1981 *
1982 * RETURN     :
1983 *
1984 *==========================================================================*/
1985int QCamera3HardwareInterface::validateCaptureRequest(
1986                    camera3_capture_request_t *request)
1987{
1988    ssize_t idx = 0;
1989    const camera3_stream_buffer_t *b;
1990    CameraMetadata meta;
1991
1992    /* Sanity check the request */
1993    if (request == NULL) {
1994        ALOGE("%s: NULL capture request", __func__);
1995        return BAD_VALUE;
1996    }
1997
1998    if (request->settings == NULL && mFirstRequest) {
1999        /*settings cannot be null for the first request*/
2000        return BAD_VALUE;
2001    }
2002
2003    uint32_t frameNumber = request->frame_number;
2004    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2005        ALOGE("%s: Request %d: No output buffers provided!",
2006                __FUNCTION__, frameNumber);
2007        return BAD_VALUE;
2008    }
2009    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2010        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
2011                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
2012        return BAD_VALUE;
2013    }
2014    if (request->input_buffer != NULL) {
2015        b = request->input_buffer;
2016        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2017            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2018                    __func__, frameNumber, (long)idx);
2019            return BAD_VALUE;
2020        }
2021        if (b->release_fence != -1) {
2022            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2023                    __func__, frameNumber, (long)idx);
2024            return BAD_VALUE;
2025        }
2026        if (b->buffer == NULL) {
2027            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2028                    __func__, frameNumber, (long)idx);
2029            return BAD_VALUE;
2030        }
2031    }
2032
2033    // Validate all buffers
2034    b = request->output_buffers;
2035    do {
2036        QCamera3ProcessingChannel *channel =
2037                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2038        if (channel == NULL) {
2039            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
2040                    __func__, frameNumber, (long)idx);
2041            return BAD_VALUE;
2042        }
2043        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2044            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2045                    __func__, frameNumber, (long)idx);
2046            return BAD_VALUE;
2047        }
2048        if (b->release_fence != -1) {
2049            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2050                    __func__, frameNumber, (long)idx);
2051            return BAD_VALUE;
2052        }
2053        if (b->buffer == NULL) {
2054            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2055                    __func__, frameNumber, (long)idx);
2056            return BAD_VALUE;
2057        }
2058        if (*(b->buffer) == NULL) {
2059            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2060                    __func__, frameNumber, (long)idx);
2061            return BAD_VALUE;
2062        }
2063        idx++;
2064        b = request->output_buffers + idx;
2065    } while (idx < (ssize_t)request->num_output_buffers);
2066
2067    return NO_ERROR;
2068}
2069
2070/*===========================================================================
2071 * FUNCTION   : deriveMinFrameDuration
2072 *
2073 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2074 *              on currently configured streams.
2075 *
2076 * PARAMETERS : NONE
2077 *
2078 * RETURN     : NONE
2079 *
2080 *==========================================================================*/
2081void QCamera3HardwareInterface::deriveMinFrameDuration()
2082{
2083    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2084
2085    maxJpegDim = 0;
2086    maxProcessedDim = 0;
2087    maxRawDim = 0;
2088
2089    // Figure out maximum jpeg, processed, and raw dimensions
2090    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2091        it != mStreamInfo.end(); it++) {
2092
2093        // Input stream doesn't have valid stream_type
2094        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2095            continue;
2096
2097        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2098        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2099            if (dimension > maxJpegDim)
2100                maxJpegDim = dimension;
2101        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2102                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2103                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2104            if (dimension > maxRawDim)
2105                maxRawDim = dimension;
2106        } else {
2107            if (dimension > maxProcessedDim)
2108                maxProcessedDim = dimension;
2109        }
2110    }
2111
2112    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2113            MAX_SIZES_CNT);
2114
2115    //Assume all jpeg dimensions are in processed dimensions.
2116    if (maxJpegDim > maxProcessedDim)
2117        maxProcessedDim = maxJpegDim;
2118    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2119    if (maxProcessedDim > maxRawDim) {
2120        maxRawDim = INT32_MAX;
2121
2122        for (size_t i = 0; i < count; i++) {
2123            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2124                    gCamCapability[mCameraId]->raw_dim[i].height;
2125            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2126                maxRawDim = dimension;
2127        }
2128    }
2129
2130    //Find minimum durations for processed, jpeg, and raw
2131    for (size_t i = 0; i < count; i++) {
2132        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2133                gCamCapability[mCameraId]->raw_dim[i].height) {
2134            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2135            break;
2136        }
2137    }
2138    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2139    for (size_t i = 0; i < count; i++) {
2140        if (maxProcessedDim ==
2141                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2142                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2143            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2144            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2145            break;
2146        }
2147    }
2148}
2149
2150/*===========================================================================
2151 * FUNCTION   : getMinFrameDuration
2152 *
2153 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2154 *              and current request configuration.
2155 *
2156 * PARAMETERS : @request: request sent by the frameworks
2157 *
2158 * RETURN     : min frame duration for a particular request
2159 *
2160 *==========================================================================*/
2161int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2162{
2163    bool hasJpegStream = false;
2164    bool hasRawStream = false;
2165    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2166        const camera3_stream_t *stream = request->output_buffers[i].stream;
2167        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2168            hasJpegStream = true;
2169        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2170                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2171                stream->format == HAL_PIXEL_FORMAT_RAW16)
2172            hasRawStream = true;
2173    }
2174
2175    if (!hasJpegStream)
2176        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2177    else
2178        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2179}
2180
2181/*===========================================================================
2182 * FUNCTION   : handlePendingReprocResults
2183 *
2184 * DESCRIPTION: check and notify on any pending reprocess results
2185 *
2186 * PARAMETERS :
2187 *   @frame_number   : Pending request frame number
2188 *
2189 * RETURN     : int32_t type of status
2190 *              NO_ERROR  -- success
2191 *              none-zero failure code
2192 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a delayed reprocess result that matches this frame number.
    // If found: fire its stashed notify message, then build and dispatch the
    // capture result from the matching entry in mPendingRequestsList, and
    // finally remove both bookkeeping entries. Order matters to the
    // frameworks: notify() must precede process_capture_result().
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the pending request carrying the input buffer and settings
            // that belong to this reprocess result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // One output buffer (stashed in the reprocess entry) plus
                    // the original input buffer and settings make up the
                    // complete, final result for this request.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    erasePendingRequest(k);
                    break;
                }
            }
            // Safe to erase here: we break out of the loop immediately, so the
            // invalidated iterator j is never advanced.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // NOTE(review): always returns NO_ERROR, even when no matching frame was
    // found — callers appear to treat "nothing pending" as success.
    return NO_ERROR;
}
2231
2232/*===========================================================================
2233 * FUNCTION   : handleBatchMetadata
2234 *
2235 * DESCRIPTION: Handles metadata buffer callback in batch mode
2236 *
2237 * PARAMETERS : @metadata_buf: metadata buffer
2238 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2239 *                 the meta buf in this method
2240 *
2241 * RETURN     :
2242 *
2243 *==========================================================================*/
2244void QCamera3HardwareInterface::handleBatchMetadata(
2245        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2246{
2247    ATRACE_CALL();
2248
2249    if (NULL == metadata_buf) {
2250        ALOGE("%s: metadata_buf is NULL", __func__);
2251        return;
2252    }
2253    /* In batch mode, the metdata will contain the frame number and timestamp of
2254     * the last frame in the batch. Eg: a batch containing buffers from request
2255     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2256     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2257     * multiple process_capture_results */
2258    metadata_buffer_t *metadata =
2259            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2260    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2261    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2262    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2263    uint32_t frame_number = 0, urgent_frame_number = 0;
2264    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2265    bool invalid_metadata = false;
2266    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2267    size_t loopCount = 1;
2268
2269    int32_t *p_frame_number_valid =
2270            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2271    uint32_t *p_frame_number =
2272            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2273    int64_t *p_capture_time =
2274            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2275    int32_t *p_urgent_frame_number_valid =
2276            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2277    uint32_t *p_urgent_frame_number =
2278            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2279
2280    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2281            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2282            (NULL == p_urgent_frame_number)) {
2283        ALOGE("%s: Invalid metadata", __func__);
2284        invalid_metadata = true;
2285    } else {
2286        frame_number_valid = *p_frame_number_valid;
2287        last_frame_number = *p_frame_number;
2288        last_frame_capture_time = *p_capture_time;
2289        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2290        last_urgent_frame_number = *p_urgent_frame_number;
2291    }
2292
2293    /* In batchmode, when no video buffers are requested, set_parms are sent
2294     * for every capture_request. The difference between consecutive urgent
2295     * frame numbers and frame numbers should be used to interpolate the
2296     * corresponding frame numbers and time stamps */
2297    pthread_mutex_lock(&mMutex);
2298    if (urgent_frame_number_valid) {
2299        first_urgent_frame_number =
2300                mPendingBatchMap.valueFor(last_urgent_frame_number);
2301        urgentFrameNumDiff = last_urgent_frame_number + 1 -
2302                first_urgent_frame_number;
2303
2304        CDBG("%s: urgent_frm: valid: %d frm_num: %d - %d",
2305                __func__, urgent_frame_number_valid,
2306                first_urgent_frame_number, last_urgent_frame_number);
2307    }
2308
2309    if (frame_number_valid) {
2310        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2311        frameNumDiff = last_frame_number + 1 -
2312                first_frame_number;
2313        mPendingBatchMap.removeItem(last_frame_number);
2314
2315        CDBG("%s:        frm: valid: %d frm_num: %d - %d",
2316                __func__, frame_number_valid,
2317                first_frame_number, last_frame_number);
2318
2319    }
2320    pthread_mutex_unlock(&mMutex);
2321
2322    if (urgent_frame_number_valid || frame_number_valid) {
2323        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2324        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2325            ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2326                    __func__, urgentFrameNumDiff, last_urgent_frame_number);
2327        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2328            ALOGE("%s: frameNumDiff: %d frameNum: %d",
2329                    __func__, frameNumDiff, last_frame_number);
2330    }
2331
2332    for (size_t i = 0; i < loopCount; i++) {
2333        /* handleMetadataWithLock is called even for invalid_metadata for
2334         * pipeline depth calculation */
2335        if (!invalid_metadata) {
2336            /* Infer frame number. Batch metadata contains frame number of the
2337             * last frame */
2338            if (urgent_frame_number_valid) {
2339                if (i < urgentFrameNumDiff) {
2340                    urgent_frame_number =
2341                            first_urgent_frame_number + i;
2342                    CDBG("%s: inferred urgent frame_number: %d",
2343                            __func__, urgent_frame_number);
2344                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2345                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2346                } else {
2347                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2348                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2349                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2350                }
2351            }
2352
2353            /* Infer frame number. Batch metadata contains frame number of the
2354             * last frame */
2355            if (frame_number_valid) {
2356                if (i < frameNumDiff) {
2357                    frame_number = first_frame_number + i;
2358                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2359                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2360                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2361                } else {
2362                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2363                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2364                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2365                }
2366            }
2367
2368            if (last_frame_capture_time) {
2369                //Infer timestamp
2370                first_frame_capture_time = last_frame_capture_time -
2371                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
2372                capture_time =
2373                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
2374                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2375                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2376                CDBG("%s: batch capture_time: %lld, capture_time: %lld",
2377                        __func__, last_frame_capture_time, capture_time);
2378            }
2379        }
2380        pthread_mutex_lock(&mMutex);
2381        handleMetadataWithLock(metadata_buf,
2382                false /* free_and_bufdone_meta_buf */);
2383        pthread_mutex_unlock(&mMutex);
2384    }
2385
2386done_batch_metadata:
2387    /* BufDone metadata buffer */
2388    if (free_and_bufdone_meta_buf) {
2389        mMetadataChannel->bufDone(metadata_buf);
2390        free(metadata_buf);
2391    }
2392}
2393
2394/*===========================================================================
2395 * FUNCTION   : handleMetadataWithLock
2396 *
2397 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2398 *
2399 * PARAMETERS : @metadata_buf: metadata buffer
2400 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2401 *                 the meta buf in this method
2402 *
2403 * RETURN     :
2404 *
2405 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    // Caller must hold mMutex (see call sites in handleBatchMetadata and the
    // non-batch metadata callback path).
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // NOTE(review): p_cam_frame_drop declared by IF_META_AVAILABLE is used
    // much later (frame-drop handling in the flush loop) — the macro
    // apparently declares it in the enclosing scope; confirm against its
    // definition before restructuring.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    // Incomplete metadata: release the buffer (if asked to) and still fall
    // through to done_metadata so pipeline depth advances for all pending
    // requests.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Recieved an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Older non-reprocess requests that never saw a partial result
            // indicate the HAL skipped their urgent metadata — log only.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                // The translated 3A metadata is owned here; release after the
                // framework callback returns.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Flush every pending request up to and including this frame number.
    // Entries below frame_number either belong to reprocess requests (skipped
    // here) or mean a missed metadata callback (fatal).
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
                   QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                   for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                       if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                           ALOGW("%s: Start of reporting error frame#=%u, streamID=%u",
                                   __func__, i->frame_number, streamID);
                           notify_msg.type = CAMERA3_MSG_ERROR;
                           notify_msg.message.error.frame_number = i->frame_number;
                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                           notify_msg.message.error.error_stream = j->stream;
                           mCallbackOps->notify(mCallbackOps, &notify_msg);
                           ALOGW("%s: End of reporting error frame#=%u, streamID=%u",
                                  __func__, i->frame_number, streamID);
                           // Remember the drop so the buffer callback can mark
                           // the matching stream buffer STATUS_ERROR later.
                           PendingFrameDropInfo PendingFrameDrop;
                           PendingFrameDrop.frame_number=i->frame_number;
                           PendingFrameDrop.stream_ID = streamID;
                           // Add the Frame drop info to mPendingFrameDropList
                           mPendingFrameDropList.push_back(PendingFrameDrop);
                      }
                   }
               } else {
                   ALOGE("%s: JPEG buffer dropped for frame number %d",
                           __func__, i->frame_number);
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else {
                // Missed metadata for a live request is unrecoverable:
                // release the buffer and report a device-level error.
                ALOGE("%s: Fatal: Missing metadata buffer for frame number %d", __func__, i->frame_number);
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(notify_msg));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                goto done_metadata;
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Ownership of metadata_buf passes to the channel; that is
                    // why the !internalPproc branch below is the only place
                    // this function releases it.
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark buffers previously reported dropped via
                    // ERROR_BUFFER notify with STATUS_ERROR, and retire the
                    // matching drop-list entry.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // This buffer is being returned now; drop it from the
                    // global pending-buffers accounting.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        CDBG("%s: Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                    __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld",
                        __func__, __LINE__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        i = erasePendingRequest(i);

        // NOTE(review): checks frame_number + 1 — presumably reprocess results
        // are registered against the next frame; confirm against the producer
        // of mPendingReprocessResultList.
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every still-pending request has spent one more frame in the pipeline.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
    unblockRequestIfNecessary();

}
2718
2719/*===========================================================================
2720 * FUNCTION   : hdrPlusPerfLock
2721 *
2722 * DESCRIPTION: perf lock for HDR+ using custom intent
2723 *
2724 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2725 *
2726 * RETURN     : None
2727 *
2728 *==========================================================================*/
2729void QCamera3HardwareInterface::hdrPlusPerfLock(
2730        mm_camera_super_buf_t *metadata_buf)
2731{
2732    if (NULL == metadata_buf) {
2733        ALOGE("%s: metadata_buf is NULL", __func__);
2734        return;
2735    }
2736    metadata_buffer_t *metadata =
2737            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2738    int32_t *p_frame_number_valid =
2739            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2740    uint32_t *p_frame_number =
2741            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2742
2743    //acquire perf lock for 5 sec after the last HDR frame is captured
2744    if (*p_frame_number_valid) {
2745        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2746            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2747        }
2748    }
2749
2750    //release lock after perf lock timer is expired. If lock is already released,
2751    //isTimerReset returns false
2752    if (m_perfLock.isTimerReset()) {
2753        mLastCustIntentFrmNum = -1;
2754        m_perfLock.lock_rel_timed();
2755    }
2756}
2757
2758/*===========================================================================
2759 * FUNCTION   : handleInputBufferWithLock
2760 *
2761 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
2762 *
2763 * PARAMETERS : @frame_number: frame number of the input buffer
2764 *
2765 * RETURN     :
2766 *
2767 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Caller holds mMutex. Locate the pending reprocess request for this
    // frame number; emit its shutter (once) and then the final capture result
    // carrying the input buffer and the request settings as the metadata.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // For reprocess, the shutter timestamp should come from the
            // original capture's ANDROID_SENSOR_TIMESTAMP in the request
            // settings; fall back to "now" only if it is missing.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            // notify() must precede process_capture_result() for this frame.
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            // Guard against double-notifying if this path re-enters for the
            // same request.
            i->shutter_notified = true;
            CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (and close) the producer's release fence before handing
        // the input buffer back to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
           }
        }

        // Final (and only) result for an input request: settings echoed back
        // as metadata, no output buffers, input buffer returned.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        CDBG("%s: Input request metadata and input buffer frame_number = %u",
                       __func__, i->frame_number);
        i = erasePendingRequest(i);
    } else {
        ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
    }
}
2826
2827/*===========================================================================
2828 * FUNCTION   : handleBufferWithLock
2829 *
2830 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
2831 *
2832 * PARAMETERS : @buffer: image buffer for the callback
2833 *              @frame_number: frame number of the image buffer
2834 *
2835 * RETURN     :
2836 *
2837 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // No pending request entry for this frame number: return the buffer
        // to the framework right away as a buffer-only result (result.result
        // stays NULL; presumably the metadata for this frame was already
        // delivered when its pending request was retired).
        //
        // Sanity check: every remaining live (non-reprocess) pending request
        // should have a frame number greater than the one completing here,
        // otherwise results would be going out of order.
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                ALOGE("%s: Error: pending live frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If the backend flagged this (stream, frame) pair as dropped, mark
        // the buffer STATUS_ERROR so the framework discards its contents, and
        // remove the drop record since it is now consumed.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Drop this buffer handle from the pending-buffers bookkeeping before
        // handing it back to the framework.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request (has an input buffer): construct the shutter
            // notification, preferring the sensor timestamp carried in the
            // input settings over the current monotonic time.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on the input buffer's release fence (if any) and close the
            // fd; the wait result is logged but does not abort completion.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
               }
            }

            // Drop the output buffer handle from the pending-buffers
            // bookkeeping (same as the fast path above).
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

            // Results must go to the framework in frame-number order: only
            // notify now if no pending request has a smaller frame number.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                // In order: send shutter + complete result (settings, input
                // buffer, output buffer) and retire the pending request.
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = erasePendingRequest(i);
            } else {
                // Out of order: cache the reprocess result (shutter message,
                // buffer copy, frame number) to be sent once earlier frames
                // have completed.
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Normal request still pending: cache a heap copy of the buffer
            // in the request's per-stream buffer slot; it is returned to the
            // framework together with the metadata (handleMetadataWithLock).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        // Duplicate callback for the same stream in one
                        // request — should not happen; keep the first copy.
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        // NOTE(review): malloc result is not NULL-checked; an
                        // allocation failure would crash on the copy below.
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
2992
2993/*===========================================================================
2994 * FUNCTION   : unblockRequestIfNecessary
2995 *
2996 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
2997 *              that mMutex is held when this function is called.
2998 *
2999 * PARAMETERS :
3000 *
3001 * RETURN     :
3002 *
3003 *==========================================================================*/
3004void QCamera3HardwareInterface::unblockRequestIfNecessary()
3005{
3006   // Unblock process_capture_request
3007   pthread_cond_signal(&mRequestCond);
3008}
3009
3010
3011/*===========================================================================
3012 * FUNCTION   : processCaptureRequest
3013 *
3014 * DESCRIPTION: process a capture request from camera service
3015 *
3016 * PARAMETERS :
3017 *   @request : request from framework to process
3018 *
3019 * RETURN     :
3020 *
3021 *==========================================================================*/
3022int QCamera3HardwareInterface::processCaptureRequest(
3023                    camera3_capture_request_t *request)
3024{
3025    ATRACE_CALL();
3026    int rc = NO_ERROR;
3027    int32_t request_id;
3028    CameraMetadata meta;
3029    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3030    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3031    bool isVidBufRequested = false;
3032    camera3_stream_buffer_t *pInputBuffer = NULL;
3033
3034    pthread_mutex_lock(&mMutex);
3035
3036    rc = validateCaptureRequest(request);
3037    if (rc != NO_ERROR) {
3038        ALOGE("%s: incoming request is not valid", __func__);
3039        pthread_mutex_unlock(&mMutex);
3040        return rc;
3041    }
3042
3043    meta = request->settings;
3044
3045    // For first capture request, send capture intent, and
3046    // stream on all streams
3047    if (mFirstRequest) {
3048        // send an unconfigure to the backend so that the isp
3049        // resources are deallocated
3050        if (!mFirstConfiguration) {
3051            cam_stream_size_info_t stream_config_info;
3052            int32_t hal_version = CAM_HAL_V3;
3053            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3054            stream_config_info.buffer_info.min_buffers =
3055                    MIN_INFLIGHT_REQUESTS;
3056            stream_config_info.buffer_info.max_buffers =
3057                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3058            clear_metadata_buffer(mParameters);
3059            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3060                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3061            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3062                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3063            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3064                    mParameters);
3065            if (rc < 0) {
3066                ALOGE("%s: set_parms for unconfigure failed", __func__);
3067                pthread_mutex_unlock(&mMutex);
3068                return rc;
3069            }
3070        }
3071        m_perfLock.lock_acq();
3072        /* get eis information for stream configuration */
3073        cam_is_type_t is_type;
3074        char is_type_value[PROPERTY_VALUE_MAX];
3075        property_get("persist.camera.is_type", is_type_value, "0");
3076        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3077
3078        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3079            int32_t hal_version = CAM_HAL_V3;
3080            uint8_t captureIntent =
3081                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3082            mCaptureIntent = captureIntent;
3083            clear_metadata_buffer(mParameters);
3084            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3085            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3086        }
3087
3088        //If EIS is enabled, turn it on for video
3089        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3090        int32_t vsMode;
3091        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3092        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3093            rc = BAD_VALUE;
3094        }
3095
3096        //IS type will be 0 unless EIS is supported. If EIS is supported
3097        //it could either be 1 or 4 depending on the stream and video size
3098        if (setEis) {
3099            if (!m_bEisSupportedSize) {
3100                is_type = IS_TYPE_DIS;
3101            } else {
3102                is_type = IS_TYPE_EIS_2_0;
3103            }
3104            mStreamConfigInfo.is_type = is_type;
3105        } else {
3106            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3107        }
3108
3109        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3110                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3111        int32_t tintless_value = 1;
3112        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3113                CAM_INTF_PARM_TINTLESS, tintless_value);
3114        //Disable CDS for HFR mode and if mPprocBypass = true.
3115        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3116        //after every configure_stream
3117        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3118                (m_bIsVideo)) {
3119            int32_t cds = CAM_CDS_MODE_OFF;
3120            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3121                    CAM_INTF_PARM_CDS_MODE, cds))
3122                ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3123
3124        }
3125        setMobicat();
3126
3127        /* Set fps and hfr mode while sending meta stream info so that sensor
3128         * can configure appropriate streaming mode */
3129        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3130        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3131            rc = setHalFpsRange(meta, mParameters);
3132            if (rc != NO_ERROR) {
3133                ALOGE("%s: setHalFpsRange failed", __func__);
3134            }
3135        }
3136        if (meta.exists(ANDROID_CONTROL_MODE)) {
3137            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3138            rc = extractSceneMode(meta, metaMode, mParameters);
3139            if (rc != NO_ERROR) {
3140                ALOGE("%s: extractSceneMode failed", __func__);
3141            }
3142        }
3143
3144        //TODO: validate the arguments, HSV scenemode should have only the
3145        //advertised fps ranges
3146
3147        /*set the capture intent, hal version, tintless, stream info,
3148         *and disenable parameters to the backend*/
3149        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3150        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3151                    mParameters);
3152
3153        cam_dimension_t sensor_dim;
3154        memset(&sensor_dim, 0, sizeof(sensor_dim));
3155        rc = getSensorOutputSize(sensor_dim);
3156        if (rc != NO_ERROR) {
3157            ALOGE("%s: Failed to get sensor output size", __func__);
3158            pthread_mutex_unlock(&mMutex);
3159            goto error_exit;
3160        }
3161
3162        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3163                gCamCapability[mCameraId]->active_array_size.height,
3164                sensor_dim.width, sensor_dim.height);
3165
3166        /* Set batchmode before initializing channel. Since registerBuffer
3167         * internally initializes some of the channels, better set batchmode
3168         * even before first register buffer */
3169        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3170            it != mStreamInfo.end(); it++) {
3171            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3172            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3173                    && mBatchSize) {
3174                rc = channel->setBatchSize(mBatchSize);
3175                //Disable per frame map unmap for HFR/batchmode case
3176                rc |= channel->setPerFrameMapUnmap(false);
3177                if (NO_ERROR != rc) {
3178                    ALOGE("%s : Channel init failed %d", __func__, rc);
3179                    pthread_mutex_unlock(&mMutex);
3180                    goto error_exit;
3181                }
3182            }
3183        }
3184
3185        //First initialize all streams
3186        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3187            it != mStreamInfo.end(); it++) {
3188            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3189            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3190               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3191               setEis)
3192                rc = channel->initialize(is_type);
3193            else {
3194                rc = channel->initialize(IS_TYPE_NONE);
3195            }
3196            if (NO_ERROR != rc) {
3197                ALOGE("%s : Channel initialization failed %d", __func__, rc);
3198                pthread_mutex_unlock(&mMutex);
3199                goto error_exit;
3200            }
3201        }
3202
3203        if (mRawDumpChannel) {
3204            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3205            if (rc != NO_ERROR) {
3206                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3207                pthread_mutex_unlock(&mMutex);
3208                goto error_exit;
3209            }
3210        }
3211        if (mSupportChannel) {
3212            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3213            if (rc < 0) {
3214                ALOGE("%s: Support channel initialization failed", __func__);
3215                pthread_mutex_unlock(&mMutex);
3216                goto error_exit;
3217            }
3218        }
3219        if (mAnalysisChannel) {
3220            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3221            if (rc < 0) {
3222                ALOGE("%s: Analysis channel initialization failed", __func__);
3223                pthread_mutex_unlock(&mMutex);
3224                goto error_exit;
3225            }
3226        }
3227        if (mDummyBatchChannel) {
3228            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3229            if (rc < 0) {
3230                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3231                pthread_mutex_unlock(&mMutex);
3232                goto error_exit;
3233            }
3234            rc = mDummyBatchChannel->initialize(is_type);
3235            if (rc < 0) {
3236                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3237                pthread_mutex_unlock(&mMutex);
3238                goto error_exit;
3239            }
3240        }
3241
3242        // Set bundle info
3243        rc = setBundleInfo();
3244        if (rc < 0) {
3245            ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3246            pthread_mutex_unlock(&mMutex);
3247            goto error_exit;
3248        }
3249
3250        //Then start them.
3251        CDBG_HIGH("%s: Start META Channel", __func__);
3252        rc = mMetadataChannel->start();
3253        if (rc < 0) {
3254            ALOGE("%s: META channel start failed", __func__);
3255            pthread_mutex_unlock(&mMutex);
3256            goto error_exit;
3257        }
3258
3259        if (mAnalysisChannel) {
3260            rc = mAnalysisChannel->start();
3261            if (rc < 0) {
3262                ALOGE("%s: Analysis channel start failed", __func__);
3263                mMetadataChannel->stop();
3264                pthread_mutex_unlock(&mMutex);
3265                goto error_exit;
3266            }
3267        }
3268
3269        if (mSupportChannel) {
3270            rc = mSupportChannel->start();
3271            if (rc < 0) {
3272                ALOGE("%s: Support channel start failed", __func__);
3273                mMetadataChannel->stop();
3274                /* Although support and analysis are mutually exclusive today
3275                   adding it in anycase for future proofing */
3276                if (mAnalysisChannel) {
3277                    mAnalysisChannel->stop();
3278                }
3279                pthread_mutex_unlock(&mMutex);
3280                goto error_exit;
3281            }
3282        }
3283        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3284            it != mStreamInfo.end(); it++) {
3285            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3286            CDBG_HIGH("%s: Start Processing Channel mask=%d",
3287                    __func__, channel->getStreamTypeMask());
3288            rc = channel->start();
3289            if (rc < 0) {
3290                ALOGE("%s: channel start failed", __func__);
3291                pthread_mutex_unlock(&mMutex);
3292                goto error_exit;
3293            }
3294        }
3295
3296        if (mRawDumpChannel) {
3297            CDBG("%s: Starting raw dump stream",__func__);
3298            rc = mRawDumpChannel->start();
3299            if (rc != NO_ERROR) {
3300                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3301                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3302                      it != mStreamInfo.end(); it++) {
3303                    QCamera3Channel *channel =
3304                        (QCamera3Channel *)(*it)->stream->priv;
3305                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3306                        channel->getStreamTypeMask());
3307                    channel->stop();
3308                }
3309                if (mSupportChannel)
3310                    mSupportChannel->stop();
3311                if (mAnalysisChannel) {
3312                    mAnalysisChannel->stop();
3313                }
3314                mMetadataChannel->stop();
3315                pthread_mutex_unlock(&mMutex);
3316                goto error_exit;
3317            }
3318        }
3319
3320        if (mChannelHandle) {
3321
3322            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3323                    mChannelHandle);
3324            if (rc != NO_ERROR) {
3325                ALOGE("%s: start_channel failed %d", __func__, rc);
3326                pthread_mutex_unlock(&mMutex);
3327                goto error_exit;
3328            }
3329        }
3330
3331
3332        goto no_error;
3333error_exit:
3334        m_perfLock.lock_rel();
3335        return rc;
3336no_error:
3337        m_perfLock.lock_rel();
3338
3339        mWokenUpByDaemon = false;
3340        mPendingLiveRequest = 0;
3341        mFirstConfiguration = false;
3342        enablePowerHint();
3343    }
3344
3345    uint32_t frameNumber = request->frame_number;
3346    cam_stream_ID_t streamID;
3347
3348    if (meta.exists(ANDROID_REQUEST_ID)) {
3349        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3350        mCurrentRequestId = request_id;
3351        CDBG("%s: Received request with id: %d",__func__, request_id);
3352    } else if (mFirstRequest || mCurrentRequestId == -1){
3353        ALOGE("%s: Unable to find request id field, \
3354                & no previous id available", __func__);
3355        pthread_mutex_unlock(&mMutex);
3356        return NAME_NOT_FOUND;
3357    } else {
3358        CDBG("%s: Re-using old request id", __func__);
3359        request_id = mCurrentRequestId;
3360    }
3361
3362    CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3363                                    __func__, __LINE__,
3364                                    request->num_output_buffers,
3365                                    request->input_buffer,
3366                                    frameNumber);
3367    // Acquire all request buffers first
3368    streamID.num_streams = 0;
3369    int blob_request = 0;
3370    uint32_t snapshotStreamId = 0;
3371    for (size_t i = 0; i < request->num_output_buffers; i++) {
3372        const camera3_stream_buffer_t& output = request->output_buffers[i];
3373        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3374
3375        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3376            //Call function to store local copy of jpeg data for encode params.
3377            blob_request = 1;
3378            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3379        }
3380
3381        if (output.acquire_fence != -1) {
3382           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3383           close(output.acquire_fence);
3384           if (rc != OK) {
3385              ALOGE("%s: sync wait failed %d", __func__, rc);
3386              pthread_mutex_unlock(&mMutex);
3387              return rc;
3388           }
3389        }
3390
3391        streamID.streamID[streamID.num_streams] =
3392            channel->getStreamID(channel->getStreamTypeMask());
3393        streamID.num_streams++;
3394
3395        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3396            isVidBufRequested = true;
3397        }
3398    }
3399
3400    if (blob_request && mRawDumpChannel) {
3401        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3402        streamID.streamID[streamID.num_streams] =
3403            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3404        streamID.num_streams++;
3405    }
3406
3407    if(request->input_buffer == NULL) {
3408        /* Parse the settings:
3409         * - For every request in NORMAL MODE
3410         * - For every request in HFR mode during preview only case
3411         * - For first request of every batch in HFR mode during video
3412         * recording. In batchmode the same settings except frame number is
3413         * repeated in each request of the batch.
3414         */
3415        if (!mBatchSize ||
3416           (mBatchSize && !isVidBufRequested) ||
3417           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3418            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3419            if (rc < 0) {
3420                ALOGE("%s: fail to set frame parameters", __func__);
3421                pthread_mutex_unlock(&mMutex);
3422                return rc;
3423            }
3424        }
3425        /* For batchMode HFR, setFrameParameters is not called for every
3426         * request. But only frame number of the latest request is parsed.
3427         * Keep track of first and last frame numbers in a batch so that
3428         * metadata for the frame numbers of batch can be duplicated in
3429         * handleBatchMetadta */
3430        if (mBatchSize) {
3431            if (!mToBeQueuedVidBufs) {
3432                //start of the batch
3433                mFirstFrameNumberInBatch = request->frame_number;
3434            }
3435            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3436                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3437                ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3438                return BAD_VALUE;
3439            }
3440        }
3441        if (mNeedSensorRestart) {
3442            /* Unlock the mutex as restartSensor waits on the channels to be
3443             * stopped, which in turn calls stream callback functions -
3444             * handleBufferWithLock and handleMetadataWithLock */
3445            pthread_mutex_unlock(&mMutex);
3446            rc = dynamicUpdateMetaStreamInfo();
3447            if (rc != NO_ERROR) {
3448                ALOGE("%s: Restarting the sensor failed", __func__);
3449                return BAD_VALUE;
3450            }
3451            mNeedSensorRestart = false;
3452            pthread_mutex_lock(&mMutex);
3453        }
3454    } else {
3455
3456        if (request->input_buffer->acquire_fence != -1) {
3457           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3458           close(request->input_buffer->acquire_fence);
3459           if (rc != OK) {
3460              ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3461              pthread_mutex_unlock(&mMutex);
3462              return rc;
3463           }
3464        }
3465    }
3466
3467    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3468        mLastCustIntentFrmNum = frameNumber;
3469    }
3470    /* Update pending request list and pending buffers map */
3471    PendingRequestInfo pendingRequest;
3472    pendingRequestIterator latestRequest;
3473    pendingRequest.frame_number = frameNumber;
3474    pendingRequest.num_buffers = request->num_output_buffers;
3475    pendingRequest.request_id = request_id;
3476    pendingRequest.blob_request = blob_request;
3477    pendingRequest.timestamp = 0;
3478    pendingRequest.bUrgentReceived = 0;
3479    if (request->input_buffer) {
3480        pendingRequest.input_buffer =
3481                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3482        *(pendingRequest.input_buffer) = *(request->input_buffer);
3483        pInputBuffer = pendingRequest.input_buffer;
3484    } else {
3485       pendingRequest.input_buffer = NULL;
3486       pInputBuffer = NULL;
3487    }
3488
3489    pendingRequest.pipeline_depth = 0;
3490    pendingRequest.partial_result_cnt = 0;
3491    extractJpegMetadata(mCurJpegMeta, request);
3492    pendingRequest.jpegMetadata = mCurJpegMeta;
3493    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3494    pendingRequest.shutter_notified = false;
3495
3496    //extract capture intent
3497    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3498        mCaptureIntent =
3499                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3500    }
3501    pendingRequest.capture_intent = mCaptureIntent;
3502
3503    for (size_t i = 0; i < request->num_output_buffers; i++) {
3504        RequestedBufferInfo requestedBuf;
3505        memset(&requestedBuf, 0, sizeof(requestedBuf));
3506        requestedBuf.stream = request->output_buffers[i].stream;
3507        requestedBuf.buffer = NULL;
3508        pendingRequest.buffers.push_back(requestedBuf);
3509
3510        // Add to buffer handle the pending buffers list
3511        PendingBufferInfo bufferInfo;
3512        bufferInfo.frame_number = frameNumber;
3513        bufferInfo.buffer = request->output_buffers[i].buffer;
3514        bufferInfo.stream = request->output_buffers[i].stream;
3515        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3516        mPendingBuffersMap.num_buffers++;
3517        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3518        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3519                __func__, frameNumber, bufferInfo.buffer,
3520                channel->getStreamTypeMask(), bufferInfo.stream->format);
3521    }
3522    latestRequest = mPendingRequestsList.insert(
3523            mPendingRequestsList.end(), pendingRequest);
3524    if(mFlush) {
3525        pthread_mutex_unlock(&mMutex);
3526        return NO_ERROR;
3527    }
3528
3529    // Notify metadata channel we receive a request
3530    mMetadataChannel->request(NULL, frameNumber);
3531
3532    if(request->input_buffer != NULL){
3533        CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
3534        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3535        if (NO_ERROR != rc) {
3536            ALOGE("%s: fail to set reproc parameters", __func__);
3537            pthread_mutex_unlock(&mMutex);
3538            return rc;
3539        }
3540    }
3541
3542    // Call request on other streams
3543    uint32_t streams_need_metadata = 0;
3544    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3545    for (size_t i = 0; i < request->num_output_buffers; i++) {
3546        const camera3_stream_buffer_t& output = request->output_buffers[i];
3547        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3548
3549        if (channel == NULL) {
3550            ALOGE("%s: invalid channel pointer for stream", __func__);
3551            continue;
3552        }
3553
3554        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3555            if(request->input_buffer != NULL){
3556                rc = channel->request(output.buffer, frameNumber,
3557                        pInputBuffer, &mReprocMeta);
3558                if (rc < 0) {
3559                    ALOGE("%s: Fail to request on picture channel", __func__);
3560                    pthread_mutex_unlock(&mMutex);
3561                    return rc;
3562                }
3563            } else {
3564                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3565                        __LINE__, output.buffer, frameNumber);
3566                if (!request->settings) {
3567                    rc = channel->request(output.buffer, frameNumber,
3568                            NULL, mPrevParameters);
3569                } else {
3570                    rc = channel->request(output.buffer, frameNumber,
3571                            NULL, mParameters);
3572                }
3573                if (rc < 0) {
3574                    ALOGE("%s: Fail to request on picture channel", __func__);
3575                    pthread_mutex_unlock(&mMutex);
3576                    return rc;
3577                }
3578                pendingBufferIter->need_metadata = true;
3579                streams_need_metadata++;
3580            }
3581        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3582            bool needMetadata = false;
3583            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3584            rc = yuvChannel->request(output.buffer, frameNumber,
3585                    pInputBuffer,
3586                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3587            if (rc < 0) {
3588                ALOGE("%s: Fail to request on YUV channel", __func__);
3589                pthread_mutex_unlock(&mMutex);
3590                return rc;
3591            }
3592            pendingBufferIter->need_metadata = needMetadata;
3593            if (needMetadata)
3594                streams_need_metadata += 1;
3595            CDBG("%s: calling YUV channel request, need_metadata is %d",
3596                    __func__, needMetadata);
3597        } else {
3598            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3599                __LINE__, output.buffer, frameNumber);
3600            rc = channel->request(output.buffer, frameNumber);
3601            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3602                    && mBatchSize) {
3603                mToBeQueuedVidBufs++;
3604                if (mToBeQueuedVidBufs == mBatchSize) {
3605                    channel->queueBatchBuf();
3606                }
3607            }
3608            if (rc < 0) {
3609                ALOGE("%s: request failed", __func__);
3610                pthread_mutex_unlock(&mMutex);
3611                return rc;
3612            }
3613        }
3614        pendingBufferIter++;
3615    }
3616
3617    //If 2 streams have need_metadata set to true, fail the request, unless
3618    //we copy/reference count the metadata buffer
3619    if (streams_need_metadata > 1) {
3620        ALOGE("%s: not supporting request in which two streams requires"
3621                " 2 HAL metadata for reprocessing", __func__);
3622        pthread_mutex_unlock(&mMutex);
3623        return -EINVAL;
3624    }
3625
3626    if(request->input_buffer == NULL) {
3627        /* Set the parameters to backend:
3628         * - For every request in NORMAL MODE
3629         * - For every request in HFR mode during preview only case
3630         * - Once every batch in HFR mode during video recording
3631         */
3632        if (!mBatchSize ||
3633           (mBatchSize && !isVidBufRequested) ||
3634           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3635            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3636                    __func__, mBatchSize, isVidBufRequested,
3637                    mToBeQueuedVidBufs);
3638            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3639                    mParameters);
3640            if (rc < 0) {
3641                ALOGE("%s: set_parms failed", __func__);
3642            }
3643            /* reset to zero coz, the batch is queued */
3644            mToBeQueuedVidBufs = 0;
3645            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3646        }
3647        mPendingLiveRequest++;
3648    }
3649
3650    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3651
3652    mFirstRequest = false;
3653    // Added a timed condition wait
3654    struct timespec ts;
3655    uint8_t isValidTimeout = 1;
3656    rc = clock_gettime(CLOCK_REALTIME, &ts);
3657    if (rc < 0) {
3658      isValidTimeout = 0;
3659      ALOGE("%s: Error reading the real time clock!!", __func__);
3660    }
3661    else {
3662      // Make timeout as 5 sec for request to be honored
3663      ts.tv_sec += 5;
3664    }
3665    //Block on conditional variable
3666    if (mBatchSize) {
3667        /* For HFR, more buffers are dequeued upfront to improve the performance */
3668        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3669        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3670    }
3671    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3672        if (!isValidTimeout) {
3673            CDBG("%s: Blocking on conditional wait", __func__);
3674            pthread_cond_wait(&mRequestCond, &mMutex);
3675        }
3676        else {
3677            CDBG("%s: Blocking on timed conditional wait", __func__);
3678            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3679            if (rc == ETIMEDOUT) {
3680                rc = -ENODEV;
3681                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3682                break;
3683            }
3684        }
3685        CDBG("%s: Unblocked", __func__);
3686        if (mWokenUpByDaemon) {
3687            mWokenUpByDaemon = false;
3688            if (mPendingLiveRequest < maxInFlightRequests)
3689                break;
3690        }
3691    }
3692    pthread_mutex_unlock(&mMutex);
3693
3694    return rc;
3695}
3696
3697/*===========================================================================
3698 * FUNCTION   : dump
3699 *
3700 * DESCRIPTION:
3701 *
3702 * PARAMETERS :
3703 *
3704 *
3705 * RETURN     :
3706 *==========================================================================*/
3707void QCamera3HardwareInterface::dump(int fd)
3708{
3709    pthread_mutex_lock(&mMutex);
3710    dprintf(fd, "\n Camera HAL3 information Begin \n");
3711
3712    dprintf(fd, "\nNumber of pending requests: %zu \n",
3713        mPendingRequestsList.size());
3714    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3715    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
3716    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
3717    for(pendingRequestIterator i = mPendingRequestsList.begin();
3718            i != mPendingRequestsList.end(); i++) {
3719        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
3720        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
3721        i->input_buffer);
3722    }
3723    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
3724                mPendingBuffersMap.num_buffers);
3725    dprintf(fd, "-------+------------------\n");
3726    dprintf(fd, " Frame | Stream type mask \n");
3727    dprintf(fd, "-------+------------------\n");
3728    for(List<PendingBufferInfo>::iterator i =
3729        mPendingBuffersMap.mPendingBufferList.begin();
3730        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
3731        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
3732        dprintf(fd, " %5d | %11d \n",
3733                i->frame_number, channel->getStreamTypeMask());
3734    }
3735    dprintf(fd, "-------+------------------\n");
3736
3737    dprintf(fd, "\nPending frame drop list: %zu\n",
3738        mPendingFrameDropList.size());
3739    dprintf(fd, "-------+-----------\n");
3740    dprintf(fd, " Frame | Stream ID \n");
3741    dprintf(fd, "-------+-----------\n");
3742    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
3743        i != mPendingFrameDropList.end(); i++) {
3744        dprintf(fd, " %5d | %9d \n",
3745            i->frame_number, i->stream_ID);
3746    }
3747    dprintf(fd, "-------+-----------\n");
3748
3749    dprintf(fd, "\n Camera HAL3 information End \n");
3750
3751    /* use dumpsys media.camera as trigger to send update debug level event */
3752    mUpdateDebugLevel = true;
3753    pthread_mutex_unlock(&mMutex);
3754    return;
3755}
3756
3757/*===========================================================================
3758 * FUNCTION   : flush
3759 *
3760 * DESCRIPTION:
3761 *
3762 * PARAMETERS :
3763 *
3764 *
3765 * RETURN     :
3766 *==========================================================================*/
3767int QCamera3HardwareInterface::flush()
3768{
3769    ATRACE_CALL();
3770    int32_t rc = NO_ERROR;
3771
3772    CDBG("%s: Unblocking Process Capture Request", __func__);
3773    pthread_mutex_lock(&mMutex);
3774
3775    if (mFirstRequest) {
3776        pthread_mutex_unlock(&mMutex);
3777        return NO_ERROR;
3778    }
3779
3780    mFlush = true;
3781    pthread_mutex_unlock(&mMutex);
3782
3783    rc = stopAllChannels();
3784    if (rc < 0) {
3785        ALOGE("%s: stopAllChannels failed", __func__);
3786        return rc;
3787    }
3788    if (mChannelHandle) {
3789        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
3790                mChannelHandle);
3791    }
3792
3793    // Reset bundle info
3794    rc = setBundleInfo();
3795    if (rc < 0) {
3796        ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3797        return rc;
3798    }
3799
3800    // Mutex Lock
3801    pthread_mutex_lock(&mMutex);
3802
3803    // Unblock process_capture_request
3804    mPendingLiveRequest = 0;
3805    pthread_cond_signal(&mRequestCond);
3806
3807    rc = notifyErrorForPendingRequests();
3808    if (rc < 0) {
3809        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
3810        pthread_mutex_unlock(&mMutex);
3811        return rc;
3812    }
3813
3814    mFlush = false;
3815
3816    // Start the Streams/Channels
3817    rc = startAllChannels();
3818    if (rc < 0) {
3819        ALOGE("%s: startAllChannels failed", __func__);
3820        pthread_mutex_unlock(&mMutex);
3821        return rc;
3822    }
3823
3824    if (mChannelHandle) {
3825        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3826                    mChannelHandle);
3827        if (rc < 0) {
3828            ALOGE("%s: start_channel failed", __func__);
3829            pthread_mutex_unlock(&mMutex);
3830            return rc;
3831        }
3832    }
3833
3834    pthread_mutex_unlock(&mMutex);
3835
3836    return 0;
3837}
3838
3839/*===========================================================================
3840 * FUNCTION   : captureResultCb
3841 *
3842 * DESCRIPTION: Callback handler for all capture result
3843 *              (streams, as well as metadata)
3844 *
3845 * PARAMETERS :
3846 *   @metadata : metadata information
3847 *   @buffer   : actual gralloc buffer to be returned to frameworks.
3848 *               NULL if metadata.
3849 *
3850 * RETURN     : NONE
3851 *==========================================================================*/
3852void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
3853                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
3854{
3855    if (metadata_buf) {
3856        if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
3857            handleBatchMetadata(metadata_buf,
3858                    true /* free_and_bufdone_meta_buf */);
3859        } else { /* mBatchSize = 0 */
3860            hdrPlusPerfLock(metadata_buf);
3861            pthread_mutex_lock(&mMutex);
3862            handleMetadataWithLock(metadata_buf,
3863                    true /* free_and_bufdone_meta_buf */);
3864            pthread_mutex_unlock(&mMutex);
3865        }
3866    } else if (isInputBuffer) {
3867        pthread_mutex_lock(&mMutex);
3868        handleInputBufferWithLock(frame_number);
3869        pthread_mutex_unlock(&mMutex);
3870    } else {
3871        pthread_mutex_lock(&mMutex);
3872        handleBufferWithLock(buffer, frame_number);
3873        pthread_mutex_unlock(&mMutex);
3874    }
3875    return;
3876}
3877
3878/*===========================================================================
3879 * FUNCTION   : getReprocessibleOutputStreamId
3880 *
3881 * DESCRIPTION: Get source output stream id for the input reprocess stream
3882 *              based on size and format, which would be the largest
3883 *              output stream if an input stream exists.
3884 *
3885 * PARAMETERS :
3886 *   @id      : return the stream id if found
3887 *
3888 * RETURN     : int32_t type of status
3889 *              NO_ERROR  -- success
3890 *              none-zero failure code
3891 *==========================================================================*/
3892int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
3893{
3894    stream_info_t* stream = NULL;
3895
3896    /* check if any output or bidirectional stream with the same size and format
3897       and return that stream */
3898    if ((mInputStreamInfo.dim.width > 0) &&
3899            (mInputStreamInfo.dim.height > 0)) {
3900        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3901                it != mStreamInfo.end(); it++) {
3902
3903            camera3_stream_t *stream = (*it)->stream;
3904            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
3905                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
3906                    (stream->format == mInputStreamInfo.format)) {
3907                // Usage flag for an input stream and the source output stream
3908                // may be different.
3909                CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
3910                CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
3911                        __func__, stream->usage, mInputStreamInfo.usage);
3912
3913                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
3914                if (channel != NULL && channel->mStreams[0]) {
3915                    id = channel->mStreams[0]->getMyServerID();
3916                    return NO_ERROR;
3917                }
3918            }
3919        }
3920    } else {
3921        CDBG("%s: No input stream, so no reprocessible output stream", __func__);
3922    }
3923    return NAME_NOT_FOUND;
3924}
3925
3926/*===========================================================================
3927 * FUNCTION   : lookupFwkName
3928 *
3929 * DESCRIPTION: In case the enum is not same in fwk and backend
3930 *              make sure the parameter is correctly propogated
3931 *
3932 * PARAMETERS  :
3933 *   @arr      : map between the two enums
3934 *   @len      : len of the map
3935 *   @hal_name : name of the hal_parm to map
3936 *
3937 * RETURN     : int type of status
3938 *              fwk_name  -- success
3939 *              none-zero failure code
3940 *==========================================================================*/
3941template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
3942        size_t len, halType hal_name)
3943{
3944
3945    for (size_t i = 0; i < len; i++) {
3946        if (arr[i].hal_name == hal_name) {
3947            return arr[i].fwk_name;
3948        }
3949    }
3950
3951    /* Not able to find matching framework type is not necessarily
3952     * an error case. This happens when mm-camera supports more attributes
3953     * than the frameworks do */
3954    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
3955    return NAME_NOT_FOUND;
3956}
3957
3958/*===========================================================================
3959 * FUNCTION   : lookupHalName
3960 *
3961 * DESCRIPTION: In case the enum is not same in fwk and backend
3962 *              make sure the parameter is correctly propogated
3963 *
3964 * PARAMETERS  :
3965 *   @arr      : map between the two enums
3966 *   @len      : len of the map
3967 *   @fwk_name : name of the hal_parm to map
3968 *
3969 * RETURN     : int32_t type of status
3970 *              hal_name  -- success
3971 *              none-zero failure code
3972 *==========================================================================*/
3973template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
3974        size_t len, fwkType fwk_name)
3975{
3976    for (size_t i = 0; i < len; i++) {
3977        if (arr[i].fwk_name == fwk_name) {
3978            return arr[i].hal_name;
3979        }
3980    }
3981
3982    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
3983    return NAME_NOT_FOUND;
3984}
3985
3986/*===========================================================================
3987 * FUNCTION   : lookupProp
3988 *
3989 * DESCRIPTION: lookup a value by its name
3990 *
3991 * PARAMETERS :
3992 *   @arr     : map between the two enums
3993 *   @len     : size of the map
3994 *   @name    : name to be looked up
3995 *
3996 * RETURN     : Value if found
3997 *              CAM_CDS_MODE_MAX if not found
3998 *==========================================================================*/
3999template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4000        size_t len, const char *name)
4001{
4002    if (name) {
4003        for (size_t i = 0; i < len; i++) {
4004            if (!strcmp(arr[i].desc, name)) {
4005                return arr[i].val;
4006            }
4007        }
4008    }
4009    return CAM_CDS_MODE_MAX;
4010}
4011
4012/*===========================================================================
4013 *
4014 * DESCRIPTION:
4015 *
4016 * PARAMETERS :
4017 *   @metadata : metadata information from callback
4018 *   @timestamp: metadata buffer timestamp
4019 *   @request_id: request id
4020 *   @jpegMetadata: additional jpeg metadata
4021 *   @pprocDone: whether internal offline postprocsesing is done
4022 *
4023 * RETURN     : camera_metadata_t*
4024 *              metadata in a format specified by fwk
4025 *==========================================================================*/
4026camera_metadata_t*
4027QCamera3HardwareInterface::translateFromHalMetadata(
4028                                 metadata_buffer_t *metadata,
4029                                 nsecs_t timestamp,
4030                                 int32_t request_id,
4031                                 const CameraMetadata& jpegMetadata,
4032                                 uint8_t pipeline_depth,
4033                                 uint8_t capture_intent,
4034                                 bool pprocDone)
4035{
4036    CameraMetadata camMetadata;
4037    camera_metadata_t *resultMetadata;
4038
4039    if (jpegMetadata.entryCount())
4040        camMetadata.append(jpegMetadata);
4041
4042    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4043    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4044    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4045    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4046
4047    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4048        int64_t fwk_frame_number = *frame_number;
4049        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4050    }
4051
4052    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4053        int32_t fps_range[2];
4054        fps_range[0] = (int32_t)float_range->min_fps;
4055        fps_range[1] = (int32_t)float_range->max_fps;
4056        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4057                                      fps_range, 2);
4058        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4059            __func__, fps_range[0], fps_range[1]);
4060    }
4061
4062    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4063        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4064    }
4065
4066    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4067        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4068                METADATA_MAP_SIZE(SCENE_MODES_MAP),
4069                *sceneMode);
4070        if (NAME_NOT_FOUND != val) {
4071            uint8_t fwkSceneMode = (uint8_t)val;
4072            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4073            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4074                    __func__, fwkSceneMode);
4075        }
4076    }
4077
4078    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4079        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4080        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4081    }
4082
4083    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4084        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4085        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4086    }
4087
4088    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4089        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4090        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4091    }
4092
4093    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4094            CAM_INTF_META_EDGE_MODE, metadata) {
4095        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
4096        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4097    }
4098
4099    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4100        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4101        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4102    }
4103
4104    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4105        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4106    }
4107
4108    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4109        if (0 <= *flashState) {
4110            uint8_t fwk_flashState = (uint8_t) *flashState;
4111            if (!gCamCapability[mCameraId]->flash_available) {
4112                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4113            }
4114            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4115        }
4116    }
4117
4118    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4119        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4120        if (NAME_NOT_FOUND != val) {
4121            uint8_t fwk_flashMode = (uint8_t)val;
4122            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4123        }
4124    }
4125
4126    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4127        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4128        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4129    }
4130
4131    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4132        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4133    }
4134
4135    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4136        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4137    }
4138
4139    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4140        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4141    }
4142
4143    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4144        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4145        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4146    }
4147
4148    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4149        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4150        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4151    }
4152
4153    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4154        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4155        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4156    }
4157
4158    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4159        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4160    }
4161
4162    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4163        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4164
4165        CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4166          blackLevelSourcePattern->cam_black_level[0],
4167          blackLevelSourcePattern->cam_black_level[1],
4168          blackLevelSourcePattern->cam_black_level[2],
4169          blackLevelSourcePattern->cam_black_level[3]);
4170    }
4171
4172    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4173        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4174        float fwk_blackLevelInd[4];
4175
4176        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4177        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4178        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4179        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4180
4181        CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4182          blackLevelAppliedPattern->cam_black_level[0],
4183          blackLevelAppliedPattern->cam_black_level[1],
4184          blackLevelAppliedPattern->cam_black_level[2],
4185          blackLevelAppliedPattern->cam_black_level[3]);
4186        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4187        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4188    }
4189
4190
4191    if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4192        gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4193        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4194        for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4195            opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4196        }
4197        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4198                opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4199    }
4200
4201    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4202            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4203        int32_t scalerCropRegion[4];
4204        scalerCropRegion[0] = hScalerCropRegion->left;
4205        scalerCropRegion[1] = hScalerCropRegion->top;
4206        scalerCropRegion[2] = hScalerCropRegion->width;
4207        scalerCropRegion[3] = hScalerCropRegion->height;
4208
4209        // Adjust crop region from sensor output coordinate system to active
4210        // array coordinate system.
4211        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4212                scalerCropRegion[2], scalerCropRegion[3]);
4213
4214        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4215    }
4216
4217    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4218        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4219        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4220    }
4221
4222    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4223            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4224        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4225        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4226    }
4227
4228    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4229            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4230        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4231        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4232                sensorRollingShutterSkew, 1);
4233    }
4234
4235    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4236        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4237        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4238
4239        //calculate the noise profile based on sensitivity
4240        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4241        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4242        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4243        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4244            noise_profile[i]   = noise_profile_S;
4245            noise_profile[i+1] = noise_profile_O;
4246        }
4247        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4248                noise_profile_S, noise_profile_O);
4249        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4250                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4251    }
4252
4253    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4254        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4255        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4256    }
4257
4258    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4259        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4260                *faceDetectMode);
4261        if (NAME_NOT_FOUND != val) {
4262            uint8_t fwk_faceDetectMode = (uint8_t)val;
4263            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4264
4265            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4266                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4267                        CAM_INTF_META_FACE_DETECTION, metadata) {
4268                    uint8_t numFaces = MIN(
4269                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4270                    int32_t faceIds[MAX_ROI];
4271                    uint8_t faceScores[MAX_ROI];
4272                    int32_t faceRectangles[MAX_ROI * 4];
4273                    int32_t faceLandmarks[MAX_ROI * 6];
4274                    size_t j = 0, k = 0;
4275
4276                    for (size_t i = 0; i < numFaces; i++) {
4277                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4278                        // Adjust crop region from sensor output coordinate system to active
4279                        // array coordinate system.
4280                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4281                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4282                                rect.width, rect.height);
4283
4284                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4285                                faceRectangles+j, -1);
4286
4287                        // Map the co-ordinate sensor output coordinate system to active
4288                        // array coordinate system.
4289                        cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4290                        mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4291                                face.left_eye_center.y);
4292                        mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4293                                face.right_eye_center.y);
4294                        mCropRegionMapper.toActiveArray(face.mouth_center.x,
4295                                face.mouth_center.y);
4296
4297                        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4298                        j+= 4;
4299                        k+= 6;
4300                    }
4301                    if (numFaces <= 0) {
4302                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4303                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4304                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4305                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4306                    }
4307
4308                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4309                            numFaces);
4310                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4311                            faceRectangles, numFaces * 4U);
4312                    if (fwk_faceDetectMode ==
4313                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4314                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4315                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4316                                faceLandmarks, numFaces * 6U);
4317                   }
4318                }
4319            }
4320        }
4321    }
4322
4323    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4324        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4325        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4326    }
4327
4328    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4329            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4330        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4331        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4332    }
4333
4334    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4335            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4336        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4337                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4338    }
4339
4340    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4341            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4342        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4343                CAM_MAX_SHADING_MAP_HEIGHT);
4344        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4345                CAM_MAX_SHADING_MAP_WIDTH);
4346        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4347                lensShadingMap->lens_shading, 4U * map_width * map_height);
4348    }
4349
4350    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4351        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4352        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4353    }
4354
4355    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4356        //Populate CAM_INTF_META_TONEMAP_CURVES
4357        /* ch0 = G, ch 1 = B, ch 2 = R*/
4358        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4359            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4360                    __func__, tonemap->tonemap_points_cnt,
4361                    CAM_MAX_TONEMAP_CURVE_SIZE);
4362            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4363        }
4364
4365        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4366                        &tonemap->curves[0].tonemap_points[0][0],
4367                        tonemap->tonemap_points_cnt * 2);
4368
4369        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4370                        &tonemap->curves[1].tonemap_points[0][0],
4371                        tonemap->tonemap_points_cnt * 2);
4372
4373        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4374                        &tonemap->curves[2].tonemap_points[0][0],
4375                        tonemap->tonemap_points_cnt * 2);
4376    }
4377
4378    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4379            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4380        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4381                CC_GAINS_COUNT);
4382    }
4383
4384    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4385            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4386        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4387                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4388                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4389    }
4390
4391    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4392            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4393        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4394            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4395                    __func__, toneCurve->tonemap_points_cnt,
4396                    CAM_MAX_TONEMAP_CURVE_SIZE);
4397            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4398        }
4399        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4400                (float*)toneCurve->curve.tonemap_points,
4401                toneCurve->tonemap_points_cnt * 2);
4402    }
4403
4404    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4405            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4406        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4407                predColorCorrectionGains->gains, 4);
4408    }
4409
4410    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4411            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4412        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4413                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4414                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4415    }
4416
4417    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4418        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4419    }
4420
4421    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4422        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4423        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4424    }
4425
4426    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4427        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4428        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4429    }
4430
4431    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4432        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4433                *effectMode);
4434        if (NAME_NOT_FOUND != val) {
4435            uint8_t fwk_effectMode = (uint8_t)val;
4436            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4437        }
4438    }
4439
4440    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4441            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4442        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4443                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4444        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4445            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4446        }
4447        int32_t fwk_testPatternData[4];
4448        fwk_testPatternData[0] = testPatternData->r;
4449        fwk_testPatternData[3] = testPatternData->b;
4450        switch (gCamCapability[mCameraId]->color_arrangement) {
4451        case CAM_FILTER_ARRANGEMENT_RGGB:
4452        case CAM_FILTER_ARRANGEMENT_GRBG:
4453            fwk_testPatternData[1] = testPatternData->gr;
4454            fwk_testPatternData[2] = testPatternData->gb;
4455            break;
4456        case CAM_FILTER_ARRANGEMENT_GBRG:
4457        case CAM_FILTER_ARRANGEMENT_BGGR:
4458            fwk_testPatternData[2] = testPatternData->gr;
4459            fwk_testPatternData[1] = testPatternData->gb;
4460            break;
4461        default:
4462            ALOGE("%s: color arrangement %d is not supported", __func__,
4463                gCamCapability[mCameraId]->color_arrangement);
4464            break;
4465        }
4466        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4467    }
4468
4469    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4470        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4471    }
4472
4473    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4474        String8 str((const char *)gps_methods);
4475        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4476    }
4477
4478    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4479        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4480    }
4481
4482    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4483        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4484    }
4485
4486    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4487        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4488        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4489    }
4490
4491    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4492        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4493        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4494    }
4495
4496    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4497        int32_t fwk_thumb_size[2];
4498        fwk_thumb_size[0] = thumb_size->width;
4499        fwk_thumb_size[1] = thumb_size->height;
4500        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4501    }
4502
4503    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4504        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4505                privateData,
4506                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4507    }
4508
4509    if (metadata->is_tuning_params_valid) {
4510        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4511        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4512        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4513
4514
4515        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4516                sizeof(uint32_t));
4517        data += sizeof(uint32_t);
4518
4519        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4520                sizeof(uint32_t));
4521        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4522        data += sizeof(uint32_t);
4523
4524        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4525                sizeof(uint32_t));
4526        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4527        data += sizeof(uint32_t);
4528
4529        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4530                sizeof(uint32_t));
4531        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4532        data += sizeof(uint32_t);
4533
4534        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4535                sizeof(uint32_t));
4536        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4537        data += sizeof(uint32_t);
4538
4539        metadata->tuning_params.tuning_mod3_data_size = 0;
4540        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4541                sizeof(uint32_t));
4542        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4543        data += sizeof(uint32_t);
4544
4545        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4546                TUNING_SENSOR_DATA_MAX);
4547        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4548                count);
4549        data += count;
4550
4551        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4552                TUNING_VFE_DATA_MAX);
4553        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4554                count);
4555        data += count;
4556
4557        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4558                TUNING_CPP_DATA_MAX);
4559        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4560                count);
4561        data += count;
4562
4563        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4564                TUNING_CAC_DATA_MAX);
4565        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4566                count);
4567        data += count;
4568
4569        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4570                (int32_t *)(void *)tuning_meta_data_blob,
4571                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4572    }
4573
4574    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4575            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4576        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4577                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4578                NEUTRAL_COL_POINTS);
4579    }
4580
4581    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4582        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4583        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4584    }
4585
4586    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4587        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4588        // Adjust crop region from sensor output coordinate system to active
4589        // array coordinate system.
4590        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4591                hAeRegions->rect.width, hAeRegions->rect.height);
4592
4593        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4594        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4595                REGIONS_TUPLE_COUNT);
4596        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4597                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4598                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4599                hAeRegions->rect.height);
4600    }
4601
4602    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4603        uint8_t fwk_afState = (uint8_t) *afState;
4604        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4605        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4606    }
4607
4608    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4609        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4610    }
4611
4612    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4613        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4614    }
4615
4616    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4617        uint8_t fwk_lensState = *lensState;
4618        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4619    }
4620
4621    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4622        /*af regions*/
4623        int32_t afRegions[REGIONS_TUPLE_COUNT];
4624        // Adjust crop region from sensor output coordinate system to active
4625        // array coordinate system.
4626        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4627                hAfRegions->rect.width, hAfRegions->rect.height);
4628
4629        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4630        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4631                REGIONS_TUPLE_COUNT);
4632        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4633                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4634                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4635                hAfRegions->rect.height);
4636    }
4637
4638    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4639        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4640                *hal_ab_mode);
4641        if (NAME_NOT_FOUND != val) {
4642            uint8_t fwk_ab_mode = (uint8_t)val;
4643            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4644        }
4645    }
4646
4647    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4648        int val = lookupFwkName(SCENE_MODES_MAP,
4649                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4650        if (NAME_NOT_FOUND != val) {
4651            uint8_t fwkBestshotMode = (uint8_t)val;
4652            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4653            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4654        } else {
4655            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4656        }
4657    }
4658
4659    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4660         uint8_t fwk_mode = (uint8_t) *mode;
4661         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4662    }
4663
4664    /* Constant metadata values to be update*/
4665    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4666    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4667
4668    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4669    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4670
4671    int32_t hotPixelMap[2];
4672    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4673
4674    // CDS
4675    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4676        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
4677    }
4678
4679    // TNR
4680    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
4681        uint8_t tnr_enable       = tnr->denoise_enable;
4682        int32_t tnr_process_type = (int32_t)tnr->process_plates;
4683
4684        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
4685        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
4686    }
4687
4688    // Reprocess crop data
4689    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
4690        uint8_t cnt = crop_data->num_of_streams;
4691        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
4692            // mm-qcamera-daemon only posts crop_data for streams
4693            // not linked to pproc. So no valid crop metadata is not
4694            // necessarily an error case.
4695            CDBG("%s: No valid crop metadata entries", __func__);
4696        } else {
4697            uint32_t reproc_stream_id;
4698            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4699                CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
4700            } else {
4701                int rc = NO_ERROR;
4702                Vector<int32_t> roi_map;
4703                int32_t *crop = new int32_t[cnt*4];
4704                if (NULL == crop) {
4705                   rc = NO_MEMORY;
4706                }
4707                if (NO_ERROR == rc) {
4708                    int32_t streams_found = 0;
4709                    for (size_t i = 0; i < cnt; i++) {
4710                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
4711                            if (pprocDone) {
4712                                // HAL already does internal reprocessing,
4713                                // either via reprocessing before JPEG encoding,
4714                                // or offline postprocessing for pproc bypass case.
4715                                crop[0] = 0;
4716                                crop[1] = 0;
4717                                crop[2] = mInputStreamInfo.dim.width;
4718                                crop[3] = mInputStreamInfo.dim.height;
4719                            } else {
4720                                crop[0] = crop_data->crop_info[i].crop.left;
4721                                crop[1] = crop_data->crop_info[i].crop.top;
4722                                crop[2] = crop_data->crop_info[i].crop.width;
4723                                crop[3] = crop_data->crop_info[i].crop.height;
4724                            }
4725                            roi_map.add(crop_data->crop_info[i].roi_map.left);
4726                            roi_map.add(crop_data->crop_info[i].roi_map.top);
4727                            roi_map.add(crop_data->crop_info[i].roi_map.width);
4728                            roi_map.add(crop_data->crop_info[i].roi_map.height);
4729                            streams_found++;
4730                            CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
4731                                    __func__,
4732                                    crop[0], crop[1], crop[2], crop[3]);
4733                            CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
4734                                    __func__,
4735                                    crop_data->crop_info[i].roi_map.left,
4736                                    crop_data->crop_info[i].roi_map.top,
4737                                    crop_data->crop_info[i].roi_map.width,
4738                                    crop_data->crop_info[i].roi_map.height);
4739                            break;
4740
4741                       }
4742                    }
4743                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
4744                            &streams_found, 1);
4745                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
4746                            crop, (size_t)(streams_found * 4));
4747                    if (roi_map.array()) {
4748                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
4749                                roi_map.array(), roi_map.size());
4750                    }
4751               }
4752               if (crop) {
4753                   delete [] crop;
4754               }
4755            }
4756        }
4757    }
4758
4759    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
4760        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
4761                *cacMode);
4762        if (NAME_NOT_FOUND != val) {
4763            uint8_t fwkCacMode = (uint8_t)val;
4764            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
4765        } else {
4766            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
4767        }
4768    }
4769
4770    // Post blob of cam_cds_data through vendor tag.
4771    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
4772        uint8_t cnt = cdsInfo->num_of_streams;
4773        cam_cds_data_t cdsDataOverride;
4774        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
4775        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
4776        cdsDataOverride.num_of_streams = 1;
4777        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
4778            uint32_t reproc_stream_id;
4779            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
4780                CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
4781            } else {
4782                for (size_t i = 0; i < cnt; i++) {
4783                    if (cdsInfo->cds_info[i].stream_id ==
4784                            reproc_stream_id) {
4785                        cdsDataOverride.cds_info[0].cds_enable =
4786                                cdsInfo->cds_info[i].cds_enable;
4787                        break;
4788                    }
4789                }
4790            }
4791        } else {
4792            CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
4793        }
4794        camMetadata.update(QCAMERA3_CDS_INFO,
4795                (uint8_t *)&cdsDataOverride,
4796                sizeof(cam_cds_data_t));
4797    }
4798
4799    // Ldaf calibration data
4800    if (!mLdafCalibExist) {
4801        IF_META_AVAILABLE(uint32_t, ldafCalib,
4802                CAM_INTF_META_LDAF_EXIF, metadata) {
4803            mLdafCalibExist = true;
4804            mLdafCalib[0] = ldafCalib[0];
4805            mLdafCalib[1] = ldafCalib[1];
4806            CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
4807                    ldafCalib[0], ldafCalib[1]);
4808        }
4809    }
4810
4811    resultMetadata = camMetadata.release();
4812    return resultMetadata;
4813}
4814
4815/*===========================================================================
4816 * FUNCTION   : saveExifParams
4817 *
4818 * DESCRIPTION:
4819 *
4820 * PARAMETERS :
4821 *   @metadata : metadata information from callback
4822 *
4823 * RETURN     : none
4824 *
4825 *==========================================================================*/
4826void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
4827{
4828    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
4829            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
4830        mExifParams.ae_debug_params = *ae_exif_debug_params;
4831        mExifParams.ae_debug_params_valid = TRUE;
4832    }
4833    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
4834            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
4835        mExifParams.awb_debug_params = *awb_exif_debug_params;
4836        mExifParams.awb_debug_params_valid = TRUE;
4837    }
4838    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
4839            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
4840        mExifParams.af_debug_params = *af_exif_debug_params;
4841        mExifParams.af_debug_params_valid = TRUE;
4842    }
4843    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
4844            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
4845        mExifParams.asd_debug_params = *asd_exif_debug_params;
4846        mExifParams.asd_debug_params_valid = TRUE;
4847    }
4848    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
4849            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
4850        mExifParams.stats_debug_params = *stats_exif_debug_params;
4851        mExifParams.stats_debug_params_valid = TRUE;
4852    }
4853}
4854
4855/*===========================================================================
4856 * FUNCTION   : get3AExifParams
4857 *
 * DESCRIPTION: Return (by value) the EXIF parameters most recently cached
 *              by saveExifParams().
4859 *
4860 * PARAMETERS : none
4861 *
4862 *
4863 * RETURN     : mm_jpeg_exif_params_t
4864 *
4865 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return a copy of the cached EXIF parameters; the cache (mExifParams)
    // is populated by saveExifParams() as metadata callbacks arrive.
    return mExifParams;
}
4870
4871/*===========================================================================
4872 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
4873 *
 * DESCRIPTION: Translate the urgent (partial) HAL metadata — 3A states,
 *              modes and triggers (AWB/AE/AF state, precapture and AF
 *              triggers, flash/redeye-derived AE mode) — into
 *              framework-format result metadata.
4875 *
4876 * PARAMETERS :
4877 *   @metadata : metadata information from callback
4878 *
4879 * RETURN     : camera_metadata_t*
4880 *              metadata in a format specified by fwk
4881 *==========================================================================*/
4882camera_metadata_t*
4883QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
4884                                (metadata_buffer_t *metadata)
4885{
4886    CameraMetadata camMetadata;
4887    camera_metadata_t *resultMetadata;
4888
4889
4890    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
4891        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
4892        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
4893        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
4894    }
4895
4896    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
4897        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
4898                &aecTrigger->trigger, 1);
4899        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
4900                &aecTrigger->trigger_id, 1);
4901        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
4902                __func__, aecTrigger->trigger);
4903        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
4904                aecTrigger->trigger_id);
4905    }
4906
4907    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
4908        uint8_t fwk_ae_state = (uint8_t) *ae_state;
4909        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
4910        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
4911    }
4912
4913    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4914        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4915        if (NAME_NOT_FOUND != val) {
4916            uint8_t fwkAfMode = (uint8_t)val;
4917            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4918            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
4919        } else {
4920            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
4921                    val);
4922        }
4923    }
4924
4925    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
4926        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
4927                &af_trigger->trigger, 1);
4928        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
4929                __func__, af_trigger->trigger);
4930        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
4931        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
4932                af_trigger->trigger_id);
4933    }
4934
4935    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
4936        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
4937                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
4938        if (NAME_NOT_FOUND != val) {
4939            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
4940            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
4941            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
4942        } else {
4943            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
4944        }
4945    }
4946
4947    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4948    uint32_t aeMode = CAM_AE_MODE_MAX;
4949    int32_t flashMode = CAM_FLASH_MODE_MAX;
4950    int32_t redeye = -1;
4951    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
4952        aeMode = *pAeMode;
4953    }
4954    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
4955        flashMode = *pFlashMode;
4956    }
4957    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
4958        redeye = *pRedeye;
4959    }
4960
4961    if (1 == redeye) {
4962        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
4963        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4964    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
4965        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
4966                flashMode);
4967        if (NAME_NOT_FOUND != val) {
4968            fwk_aeMode = (uint8_t)val;
4969            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4970        } else {
4971            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
4972        }
4973    } else if (aeMode == CAM_AE_MODE_ON) {
4974        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
4975        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4976    } else if (aeMode == CAM_AE_MODE_OFF) {
4977        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
4978        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
4979    } else {
4980        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
4981              "flashMode:%d, aeMode:%u!!!",
4982                __func__, redeye, flashMode, aeMode);
4983    }
4984
4985    resultMetadata = camMetadata.release();
4986    return resultMetadata;
4987}
4988
4989/*===========================================================================
4990 * FUNCTION   : dumpMetadataToFile
4991 *
4992 * DESCRIPTION: Dumps tuning metadata to file system
4993 *
4994 * PARAMETERS :
4995 *   @meta           : tuning metadata
4996 *   @dumpFrameCount : current dump frame count
4997 *   @enabled        : Enable mask
4998 *
4999 *==========================================================================*/
5000void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5001                                                   uint32_t &dumpFrameCount,
5002                                                   bool enabled,
5003                                                   const char *type,
5004                                                   uint32_t frameNumber)
5005{
5006    uint32_t frm_num = 0;
5007
5008    //Some sanity checks
5009    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5010        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
5011              __func__,
5012              meta.tuning_sensor_data_size,
5013              TUNING_SENSOR_DATA_MAX);
5014        return;
5015    }
5016
5017    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5018        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
5019              __func__,
5020              meta.tuning_vfe_data_size,
5021              TUNING_VFE_DATA_MAX);
5022        return;
5023    }
5024
5025    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5026        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
5027              __func__,
5028              meta.tuning_cpp_data_size,
5029              TUNING_CPP_DATA_MAX);
5030        return;
5031    }
5032
5033    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5034        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
5035              __func__,
5036              meta.tuning_cac_data_size,
5037              TUNING_CAC_DATA_MAX);
5038        return;
5039    }
5040    //
5041
5042    if(enabled){
5043        char timeBuf[FILENAME_MAX];
5044        char buf[FILENAME_MAX];
5045        memset(buf, 0, sizeof(buf));
5046        memset(timeBuf, 0, sizeof(timeBuf));
5047        time_t current_time;
5048        struct tm * timeinfo;
5049        time (&current_time);
5050        timeinfo = localtime (&current_time);
5051        if (timeinfo != NULL) {
5052            strftime (timeBuf, sizeof(timeBuf),
5053                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5054        }
5055        String8 filePath(timeBuf);
5056        snprintf(buf,
5057                sizeof(buf),
5058                "%dm_%s_%d.bin",
5059                dumpFrameCount,
5060                type,
5061                frameNumber);
5062        filePath.append(buf);
5063        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5064        if (file_fd >= 0) {
5065            ssize_t written_len = 0;
5066            meta.tuning_data_version = TUNING_DATA_VERSION;
5067            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5068            written_len += write(file_fd, data, sizeof(uint32_t));
5069            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5070            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
5071            written_len += write(file_fd, data, sizeof(uint32_t));
5072            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5073            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
5074            written_len += write(file_fd, data, sizeof(uint32_t));
5075            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5076            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
5077            written_len += write(file_fd, data, sizeof(uint32_t));
5078            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5079            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
5080            written_len += write(file_fd, data, sizeof(uint32_t));
5081            meta.tuning_mod3_data_size = 0;
5082            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5083            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
5084            written_len += write(file_fd, data, sizeof(uint32_t));
5085            size_t total_size = meta.tuning_sensor_data_size;
5086            data = (void *)((uint8_t *)&meta.data);
5087            written_len += write(file_fd, data, total_size);
5088            total_size = meta.tuning_vfe_data_size;
5089            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5090            written_len += write(file_fd, data, total_size);
5091            total_size = meta.tuning_cpp_data_size;
5092            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5093            written_len += write(file_fd, data, total_size);
5094            total_size = meta.tuning_cac_data_size;
5095            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5096            written_len += write(file_fd, data, total_size);
5097            close(file_fd);
5098        }else {
5099            ALOGE("%s: fail to open file for metadata dumping", __func__);
5100        }
5101    }
5102}
5103
5104/*===========================================================================
5105 * FUNCTION   : cleanAndSortStreamInfo
5106 *
5107 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5108 *              and sort them such that raw stream is at the end of the list
5109 *              This is a workaround for camera daemon constraint.
5110 *
5111 * PARAMETERS : None
5112 *
5113 *==========================================================================*/
5114void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5115{
5116    List<stream_info_t *> newStreamInfo;
5117
5118    /*clean up invalid streams*/
5119    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5120            it != mStreamInfo.end();) {
5121        if(((*it)->status) == INVALID){
5122            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5123            delete channel;
5124            free(*it);
5125            it = mStreamInfo.erase(it);
5126        } else {
5127            it++;
5128        }
5129    }
5130
5131    // Move preview/video/callback/snapshot streams into newList
5132    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5133            it != mStreamInfo.end();) {
5134        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5135                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5136                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5137            newStreamInfo.push_back(*it);
5138            it = mStreamInfo.erase(it);
5139        } else
5140            it++;
5141    }
5142    // Move raw streams into newList
5143    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5144            it != mStreamInfo.end();) {
5145        newStreamInfo.push_back(*it);
5146        it = mStreamInfo.erase(it);
5147    }
5148
5149    mStreamInfo = newStreamInfo;
5150}
5151
5152/*===========================================================================
5153 * FUNCTION   : extractJpegMetadata
5154 *
5155 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5156 *              JPEG metadata is cached in HAL, and return as part of capture
5157 *              result when metadata is returned from camera daemon.
5158 *
5159 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5160 *              @request:      capture request
5161 *
5162 *==========================================================================*/
5163void QCamera3HardwareInterface::extractJpegMetadata(
5164        CameraMetadata& jpegMetadata,
5165        const camera3_capture_request_t *request)
5166{
5167    CameraMetadata frame_settings;
5168    frame_settings = request->settings;
5169
5170    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5171        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5172                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5173                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5174
5175    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5176        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5177                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5178                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5179
5180    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5181        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5182                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5183                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5184
5185    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5186        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5187                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5188                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5189
5190    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5191        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5192                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5193                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5194
5195    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5196        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5197                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5198                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5199
5200    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5201        int32_t thumbnail_size[2];
5202        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5203        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5204        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5205            int32_t orientation =
5206                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5207            if ((orientation == 90) || (orientation == 270)) {
5208               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5209               int32_t temp;
5210               temp = thumbnail_size[0];
5211               thumbnail_size[0] = thumbnail_size[1];
5212               thumbnail_size[1] = temp;
5213            }
5214         }
5215         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5216                thumbnail_size,
5217                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5218    }
5219
5220}
5221
5222/*===========================================================================
5223 * FUNCTION   : convertToRegions
5224 *
5225 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5226 *
5227 * PARAMETERS :
5228 *   @rect   : cam_rect_t struct to convert
5229 *   @region : int32_t destination array
5230 *   @weight : if we are converting from cam_area_t, weight is valid
5231 *             else weight = -1
5232 *
5233 *==========================================================================*/
5234void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5235        int32_t *region, int weight)
5236{
5237    region[0] = rect.left;
5238    region[1] = rect.top;
5239    region[2] = rect.left + rect.width;
5240    region[3] = rect.top + rect.height;
5241    if (weight > -1) {
5242        region[4] = weight;
5243    }
5244}
5245
5246/*===========================================================================
5247 * FUNCTION   : convertFromRegions
5248 *
5249 * DESCRIPTION: helper method to convert from array to cam_rect_t
5250 *
5251 * PARAMETERS :
5252 *   @rect   : cam_rect_t struct to convert
5253 *   @region : int32_t destination array
5254 *   @weight : if we are converting from cam_area_t, weight is valid
5255 *             else weight = -1
5256 *
5257 *==========================================================================*/
5258void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5259        const camera_metadata_t *settings, uint32_t tag)
5260{
5261    CameraMetadata frame_settings;
5262    frame_settings = settings;
5263    int32_t x_min = frame_settings.find(tag).data.i32[0];
5264    int32_t y_min = frame_settings.find(tag).data.i32[1];
5265    int32_t x_max = frame_settings.find(tag).data.i32[2];
5266    int32_t y_max = frame_settings.find(tag).data.i32[3];
5267    roi.weight = frame_settings.find(tag).data.i32[4];
5268    roi.rect.left = x_min;
5269    roi.rect.top = y_min;
5270    roi.rect.width = x_max - x_min;
5271    roi.rect.height = y_max - y_min;
5272}
5273
5274/*===========================================================================
5275 * FUNCTION   : resetIfNeededROI
5276 *
5277 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5278 *              crop region
5279 *
5280 * PARAMETERS :
5281 *   @roi       : cam_area_t struct to resize
5282 *   @scalerCropRegion : cam_crop_region_t region to compare against
5283 *
5284 *
5285 *==========================================================================*/
5286bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5287                                                 const cam_crop_region_t* scalerCropRegion)
5288{
5289    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5290    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5291    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5292    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5293
5294    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5295     * without having this check the calculations below to validate if the roi
5296     * is inside scalar crop region will fail resulting in the roi not being
5297     * reset causing algorithm to continue to use stale roi window
5298     */
5299    if (roi->weight == 0) {
5300        return true;
5301    }
5302
5303    if ((roi_x_max < scalerCropRegion->left) ||
5304        // right edge of roi window is left of scalar crop's left edge
5305        (roi_y_max < scalerCropRegion->top)  ||
5306        // bottom edge of roi window is above scalar crop's top edge
5307        (roi->rect.left > crop_x_max) ||
5308        // left edge of roi window is beyond(right) of scalar crop's right edge
5309        (roi->rect.top > crop_y_max)){
5310        // top edge of roi windo is above scalar crop's top edge
5311        return false;
5312    }
5313    if (roi->rect.left < scalerCropRegion->left) {
5314        roi->rect.left = scalerCropRegion->left;
5315    }
5316    if (roi->rect.top < scalerCropRegion->top) {
5317        roi->rect.top = scalerCropRegion->top;
5318    }
5319    if (roi_x_max > crop_x_max) {
5320        roi_x_max = crop_x_max;
5321    }
5322    if (roi_y_max > crop_y_max) {
5323        roi_y_max = crop_y_max;
5324    }
5325    roi->rect.width = roi_x_max - roi->rect.left;
5326    roi->rect.height = roi_y_max - roi->rect.top;
5327    return true;
5328}
5329
5330/*===========================================================================
5331 * FUNCTION   : convertLandmarks
5332 *
5333 * DESCRIPTION: helper method to extract the landmarks from face detection info
5334 *
5335 * PARAMETERS :
5336 *   @face   : cam_rect_t struct to convert
5337 *   @landmarks : int32_t destination array
5338 *
5339 *
5340 *==========================================================================*/
5341void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5342{
5343    landmarks[0] = (int32_t)face.left_eye_center.x;
5344    landmarks[1] = (int32_t)face.left_eye_center.y;
5345    landmarks[2] = (int32_t)face.right_eye_center.x;
5346    landmarks[3] = (int32_t)face.right_eye_center.y;
5347    landmarks[4] = (int32_t)face.mouth_center.x;
5348    landmarks[5] = (int32_t)face.mouth_center.y;
5349}
5350
5351#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5352/*===========================================================================
5353 * FUNCTION   : initCapabilities
5354 *
5355 * DESCRIPTION: initialize camera capabilities in static data struct
5356 *
5357 * PARAMETERS :
5358 *   @cameraId  : camera Id
5359 *
5360 * RETURN     : int32_t type of status
5361 *              NO_ERROR  -- success
5362 *              none-zero failure code
5363 *==========================================================================*/
5364int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5365{
5366    int rc = 0;
5367    mm_camera_vtbl_t *cameraHandle = NULL;
5368    QCamera3HeapMemory *capabilityHeap = NULL;
5369
5370    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5371    if (rc || !cameraHandle) {
5372        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5373        goto open_failed;
5374    }
5375
5376    capabilityHeap = new QCamera3HeapMemory(1);
5377    if (capabilityHeap == NULL) {
5378        ALOGE("%s: creation of capabilityHeap failed", __func__);
5379        goto heap_creation_failed;
5380    }
5381    /* Allocate memory for capability buffer */
5382    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5383    if(rc != OK) {
5384        ALOGE("%s: No memory for cappability", __func__);
5385        goto allocate_failed;
5386    }
5387
5388    /* Map memory for capability buffer */
5389    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5390    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5391                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5392                                capabilityHeap->getFd(0),
5393                                sizeof(cam_capability_t));
5394    if(rc < 0) {
5395        ALOGE("%s: failed to map capability buffer", __func__);
5396        goto map_failed;
5397    }
5398
5399    /* Query Capability */
5400    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5401    if(rc < 0) {
5402        ALOGE("%s: failed to query capability",__func__);
5403        goto query_failed;
5404    }
5405    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5406    if (!gCamCapability[cameraId]) {
5407        ALOGE("%s: out of memory", __func__);
5408        goto query_failed;
5409    }
5410    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5411                                        sizeof(cam_capability_t));
5412    rc = 0;
5413
5414query_failed:
5415    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5416                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5417map_failed:
5418    capabilityHeap->deallocate();
5419allocate_failed:
5420    delete capabilityHeap;
5421heap_creation_failed:
5422    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5423    cameraHandle = NULL;
5424open_failed:
5425    return rc;
5426}
5427
5428/*==========================================================================
 * FUNCTION   : get3AVersion
5430 *
5431 * DESCRIPTION: get the Q3A S/W version
5432 *
5433 * PARAMETERS :
5434 *  @sw_version: Reference of Q3A structure which will hold version info upon
5435 *               return
5436 *
5437 * RETURN     : None
5438 *
5439 *==========================================================================*/
5440void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5441{
5442    if(gCamCapability[mCameraId])
5443        sw_version = gCamCapability[mCameraId]->q3a_version;
5444    else
5445        ALOGE("%s:Capability structure NULL!", __func__);
5446}
5447
5448
5449/*===========================================================================
5450 * FUNCTION   : initParameters
5451 *
5452 * DESCRIPTION: initialize camera parameters
5453 *
5454 * PARAMETERS :
5455 *
5456 * RETURN     : int32_t type of status
5457 *              NO_ERROR  -- success
5458 *              none-zero failure code
5459 *==========================================================================*/
5460int QCamera3HardwareInterface::initParameters()
5461{
5462    int rc = 0;
5463
5464    //Allocate Set Param Buffer
5465    mParamHeap = new QCamera3HeapMemory(1);
5466    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5467    if(rc != OK) {
5468        rc = NO_MEMORY;
5469        ALOGE("Failed to allocate SETPARM Heap memory");
5470        delete mParamHeap;
5471        mParamHeap = NULL;
5472        return rc;
5473    }
5474
5475    //Map memory for parameters buffer
5476    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5477            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5478            mParamHeap->getFd(0),
5479            sizeof(metadata_buffer_t));
5480    if(rc < 0) {
5481        ALOGE("%s:failed to map SETPARM buffer",__func__);
5482        rc = FAILED_TRANSACTION;
5483        mParamHeap->deallocate();
5484        delete mParamHeap;
5485        mParamHeap = NULL;
5486        return rc;
5487    }
5488
5489    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5490
5491    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5492    return rc;
5493}
5494
5495/*===========================================================================
5496 * FUNCTION   : deinitParameters
5497 *
5498 * DESCRIPTION: de-initialize camera parameters
5499 *
5500 * PARAMETERS :
5501 *
5502 * RETURN     : NONE
5503 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Undo the daemon-side mapping established in initParameters().
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Release the heap that backed the parameter buffer.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage (see DATA_PTR in
    // initParameters), so only the pointer needs clearing here.
    mParameters = NULL;

    // The shadow copy was malloc'd separately and is freed explicitly.
    free(mPrevParameters);
    mPrevParameters = NULL;
}
5518
5519/*===========================================================================
5520 * FUNCTION   : calcMaxJpegSize
5521 *
5522 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5523 *
5524 * PARAMETERS :
5525 *
5526 * RETURN     : max_jpeg_size
5527 *==========================================================================*/
5528size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5529{
5530    size_t max_jpeg_size = 0;
5531    size_t temp_width, temp_height;
5532    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5533            MAX_SIZES_CNT);
5534    for (size_t i = 0; i < count; i++) {
5535        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5536        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5537        if (temp_width * temp_height > max_jpeg_size ) {
5538            max_jpeg_size = temp_width * temp_height;
5539        }
5540    }
5541    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5542    return max_jpeg_size;
5543}
5544
5545/*===========================================================================
5546 * FUNCTION   : getMaxRawSize
5547 *
5548 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5549 *
5550 * PARAMETERS :
5551 *
5552 * RETURN     : Largest supported Raw Dimension
5553 *==========================================================================*/
5554cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5555{
5556    int max_width = 0;
5557    cam_dimension_t maxRawSize;
5558
5559    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5560    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5561        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5562            max_width = gCamCapability[camera_id]->raw_dim[i].width;
5563            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5564        }
5565    }
5566    return maxRawSize;
5567}
5568
5569
5570/*===========================================================================
5571 * FUNCTION   : calcMaxJpegDim
5572 *
5573 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5574 *
5575 * PARAMETERS :
5576 *
5577 * RETURN     : max_jpeg_dim
5578 *==========================================================================*/
5579cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5580{
5581    cam_dimension_t max_jpeg_dim;
5582    cam_dimension_t curr_jpeg_dim;
5583    max_jpeg_dim.width = 0;
5584    max_jpeg_dim.height = 0;
5585    curr_jpeg_dim.width = 0;
5586    curr_jpeg_dim.height = 0;
5587    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5588        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5589        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5590        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5591            max_jpeg_dim.width * max_jpeg_dim.height ) {
5592            max_jpeg_dim.width = curr_jpeg_dim.width;
5593            max_jpeg_dim.height = curr_jpeg_dim.height;
5594        }
5595    }
5596    return max_jpeg_dim;
5597}
5598
5599/*===========================================================================
5600 * FUNCTION   : addStreamConfig
5601 *
5602 * DESCRIPTION: adds the stream configuration to the array
5603 *
5604 * PARAMETERS :
5605 * @available_stream_configs : pointer to stream configuration array
5606 * @scalar_format            : scalar format
5607 * @dim                      : configuration dimension
5608 * @config_type              : input or output configuration type
5609 *
5610 * RETURN     : NONE
5611 *==========================================================================*/
5612void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5613        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5614{
5615    available_stream_configs.add(scalar_format);
5616    available_stream_configs.add(dim.width);
5617    available_stream_configs.add(dim.height);
5618    available_stream_configs.add(config_type);
5619}
5620
5621
5622/*===========================================================================
5623 * FUNCTION   : initStaticMetadata
5624 *
5625 * DESCRIPTION: initialize the static metadata
5626 *
5627 * PARAMETERS :
5628 *   @cameraId  : camera Id
5629 *
5630 * RETURN     : int32_t type of status
5631 *              0  -- success
5632 *              non-zero failure code
5633 *==========================================================================*/
5634int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5635{
5636    int rc = 0;
5637    CameraMetadata staticInfo;
5638    size_t count = 0;
5639    bool limitedDevice = false;
5640    char prop[PROPERTY_VALUE_MAX];
5641
5642    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5643     * guaranteed, its advertised as limited device */
5644    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5645            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5646
5647    uint8_t supportedHwLvl = limitedDevice ?
5648            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5649            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
5650
5651    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5652            &supportedHwLvl, 1);
5653
// Lens / sensor static characteristics, copied from the per-camera
// capability table into the framework metadata.
5654    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5655    /*HAL 3 only*/
5656    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5657                    &gCamCapability[cameraId]->min_focus_distance, 1);
5658
5659    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5660                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5661
5662    /*should be using focal lengths but sensor doesn't provide that info now*/
5663    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5664                      &gCamCapability[cameraId]->focal_length,
5665                      1);
5666
5667    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5668                      gCamCapability[cameraId]->apertures,
5669                      gCamCapability[cameraId]->apertures_count);
5670
5671    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
5672                gCamCapability[cameraId]->filter_densities,
5673                gCamCapability[cameraId]->filter_densities_count);
5674
5675
// OIS mode list is stored as a cam_* enum array; cast assumes the enum
// values match the ANDROID_LENS_OPTICAL_STABILIZATION_MODE_* values —
// presumably guaranteed by the vendor headers; verify if changed.
5676    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
5677                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
5678                      gCamCapability[cameraId]->optical_stab_modes_count);
5679
5680    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
5681            gCamCapability[cameraId]->lens_shading_map_size.height};
5682    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
5683                      lens_shading_map_size,
5684                      sizeof(lens_shading_map_size)/sizeof(int32_t));
5685
5686    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
5687            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
5688
5689    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
5690            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
5691
5692    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
5693            &gCamCapability[cameraId]->max_frame_duration, 1);
5694
// Base gain factor is exposed as a rational (numerator/denominator).
5695    camera_metadata_rational baseGainFactor = {
5696            gCamCapability[cameraId]->base_gain_factor.numerator,
5697            gCamCapability[cameraId]->base_gain_factor.denominator};
5698    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
5699                      &baseGainFactor, 1);
5700
5701    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
5702                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
5703
5704    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
5705            gCamCapability[cameraId]->pixel_array_size.height};
5706    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
5707                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
5708
// Active array is published as (left, top, width, height).
5709    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
5710                                                gCamCapability[cameraId]->active_array_size.top,
5711                                                gCamCapability[cameraId]->active_array_size.width,
5712                                                gCamCapability[cameraId]->active_array_size.height};
5713    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
5714                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
5715
5716    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
5717            &gCamCapability[cameraId]->white_level, 1);
5718
5719    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
5720            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
5721
5722    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
5723                      &gCamCapability[cameraId]->flash_charge_duration, 1);
5724
5725    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
5726                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
5727
// Timestamp source is hard-coded to UNKNOWN (sensor clock not guaranteed
// to be in the same timebase as CLOCK_BOOTTIME here).
5728    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
5729    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
5730            &timestampSource, 1);
5731
5732    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
5733                      &gCamCapability[cameraId]->histogram_size, 1);
5734
5735    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
5736            &gCamCapability[cameraId]->max_histogram_count, 1);
5737
5738    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
5739            gCamCapability[cameraId]->sharpness_map_size.height};
5740
5741    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
5742            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
5743
5744    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
5745            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
5746
// Full list of pixel formats this HAL advertises; used below both for
// ANDROID_SCALER_AVAILABLE_FORMATS and to drive the per-format
// stream-configuration / min-duration loops.
5747    int32_t scalar_formats[] = {
5748            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
5749            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
5750            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
5751            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
5752            HAL_PIXEL_FORMAT_RAW10,
5753            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
5754    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
5755    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
5756                      scalar_formats,
5757                      scalar_formats_count);
5758
// Flatten the (w,h) capability tables into interleaved int32 arrays for
// the corresponding metadata tags. `count` is clamped to MAX_SIZES_CNT.
5759    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
5760    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5761    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
5762            count, MAX_SIZES_CNT, available_processed_sizes);
5763    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
5764            available_processed_sizes, count * 2);
5765
5766    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
5767    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
5768    makeTable(gCamCapability[cameraId]->raw_dim,
5769            count, MAX_SIZES_CNT, available_raw_sizes);
5770    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
5771            available_raw_sizes, count * 2);
5772
// FPS ranges are interleaved as (min, max) pairs.
5773    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
5774    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
5775    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
5776            count, MAX_SIZES_CNT, available_fps_ranges);
5777    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
5778            available_fps_ranges, count * 2);
5779
5780    camera_metadata_rational exposureCompensationStep = {
5781            gCamCapability[cameraId]->exp_compensation_step.numerator,
5782            gCamCapability[cameraId]->exp_compensation_step.denominator};
5783    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
5784                      &exposureCompensationStep, 1);
5785
// EIS (video stabilization) is advertised only for the back camera and
// only when the persist.camera.eis.enable property is set.
5786    Vector<uint8_t> availableVstabModes;
5787    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
5788    char eis_prop[PROPERTY_VALUE_MAX];
5789    memset(eis_prop, 0, sizeof(eis_prop));
5790    property_get("persist.camera.eis.enable", eis_prop, "0");
5791    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
5792    if (facingBack && eis_prop_set) {
5793        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
5794    }
5795    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
5796                      availableVstabModes.array(), availableVstabModes.size());
5797
5798    /*HAL 1 and HAL 3 common*/
// Max digital zoom is a fixed 4x here, not read from the capability table.
5799    float maxZoom = 4;
5800    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
5801            &maxZoom, 1);
5802
5803    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
5804    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
5805
// Max metering regions per 3A routine: 1 AE, 0 AWB, 1 AF. AF regions are
// dropped when only one focus mode is supported (fixed-focus module).
5806    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
5807    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
5808        max3aRegions[2] = 0; /* AF not supported */
5809    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
5810            max3aRegions, 3);
5811
5812    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
5813    memset(prop, 0, sizeof(prop));
5814    property_get("persist.camera.facedetect", prop, "1");
5815    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
5816    CDBG("%s: Support face detection mode: %d",
5817            __func__, supportedFaceDetectMode);
5818
// OFF is always present; property value (per the mapping above) selects
// which extra modes are advertised. Any other value disables FD entirely
// (maxFaces forced to 0).
5819    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
5820    Vector<uint8_t> availableFaceDetectModes;
5821    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
5822    if (supportedFaceDetectMode == 1) {
5823        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5824    } else if (supportedFaceDetectMode == 2) {
5825        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
5826    } else if (supportedFaceDetectMode == 3) {
5827        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
5828        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
5829    } else {
5830        maxFaces = 0;
5831    }
5832    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
5833            availableFaceDetectModes.array(),
5834            availableFaceDetectModes.size());
5835    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
5836            (int32_t *)&maxFaces, 1);
5837
5838    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
5839                                           gCamCapability[cameraId]->exposure_compensation_max};
5840    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
5841            exposureCompensationRange,
5842            sizeof(exposureCompensationRange)/sizeof(int32_t));
5843
5844    uint8_t lensFacing = (facingBack) ?
5845            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
5846    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
5847
// available_thumbnail_sizes is a file-scope table (not visible in this
// chunk) — presumably the fixed JPEG thumbnail size list.
5848    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
5849                      available_thumbnail_sizes,
5850                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
5851
5852    /*all sizes will be clubbed into this tag*/
// Build the JPEG size list by filtering the processed sizes against the
// active array and max downscale factor.
5853    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
5854    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
5855    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
5856            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
5857            gCamCapability[cameraId]->max_downscale_factor);
5858    /*android.scaler.availableStreamConfigurations*/
// NOTE(review): max_stream_configs_size is derived from the picture-size
// count only, yet it later sizes the min-duration array that also holds
// RAW entries — see the note at that array's declaration.
5859    size_t max_stream_configs_size = count * scalar_formats_count * 4;
5860    Vector<int32_t> available_stream_configs;
5861    cam_dimension_t active_array_dim;
5862    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
5863    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
5864    /* Add input/output stream configurations for each scalar formats*/
5865    for (size_t j = 0; j < scalar_formats_count; j++) {
5866        switch (scalar_formats[j]) {
// RAW formats: advertise one OUTPUT config per supported raw dimension.
5867        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5868        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5869        case HAL_PIXEL_FORMAT_RAW10:
5870            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5871                addStreamConfig(available_stream_configs, scalar_formats[j],
5872                        gCamCapability[cameraId]->raw_dim[i],
5873                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5874            }
5875            break;
// BLOB (JPEG): one OUTPUT config per filtered JPEG size. NOTE(review):
// this case label matches ANDROID_SCALER_AVAILABLE_FORMATS_BLOB from the
// format list only because the two constants share the same value —
// confirm they stay aliased if either header changes.
5876        case HAL_PIXEL_FORMAT_BLOB:
5877            cam_dimension_t jpeg_size;
5878            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
5879                jpeg_size.width  = available_jpeg_sizes[i*2];
5880                jpeg_size.height = available_jpeg_sizes[i*2+1];
5881                addStreamConfig(available_stream_configs, scalar_formats[j],
5882                        jpeg_size,
5883                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5884            }
5885            break;
// YUV / implementation-defined (and any other format): OUTPUT config per
// picture size, tracking the largest size for the INPUT (reprocess) entry.
5886        case HAL_PIXEL_FORMAT_YCbCr_420_888:
5887        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
5888        default:
5889            cam_dimension_t largest_picture_size;
5890            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
5891            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5892                addStreamConfig(available_stream_configs, scalar_formats[j],
5893                        gCamCapability[cameraId]->picture_sizes_tbl[i],
5894                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
5895                /* Book keep largest */
5896                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
5897                        >= largest_picture_size.width &&
5898                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
5899                        >= largest_picture_size.height)
5900                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
5901            }
5902            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
5903            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
5904                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5905                 addStreamConfig(available_stream_configs, scalar_formats[j],
5906                         largest_picture_size,
5907                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
5908            }
5909            break;
5910        }
5911    }
5912
5913    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
5914                      available_stream_configs.array(), available_stream_configs.size());
5915    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
5916    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
5917
5918    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5919    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5920
5921    /* android.scaler.availableMinFrameDurations */
// Entries are (format, width, height, min_duration) quadruples.
// NOTE(review): this is a variable-length array (GCC extension, not
// standard C++), and its capacity was computed from the picture-size
// count only (max_stream_configs_size above). If supported_raw_dim_cnt
// ever exceeds picture_sizes_tbl_cnt, the RAW entries below could
// overflow this buffer — verify against the capability tables.
5922    int64_t available_min_durations[max_stream_configs_size];
5923    size_t idx = 0;
5924    for (size_t j = 0; j < scalar_formats_count; j++) {
5925        switch (scalar_formats[j]) {
5926        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
5927        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
5928        case HAL_PIXEL_FORMAT_RAW10:
5929            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
5930                available_min_durations[idx] = scalar_formats[j];
5931                available_min_durations[idx+1] =
5932                    gCamCapability[cameraId]->raw_dim[i].width;
5933                available_min_durations[idx+2] =
5934                    gCamCapability[cameraId]->raw_dim[i].height;
5935                available_min_durations[idx+3] =
5936                    gCamCapability[cameraId]->raw_min_duration[i];
5937                idx+=4;
5938            }
5939            break;
// All non-RAW formats use the per-picture-size minimum durations.
5940        default:
5941            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
5942                available_min_durations[idx] = scalar_formats[j];
5943                available_min_durations[idx+1] =
5944                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
5945                available_min_durations[idx+2] =
5946                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
5947                available_min_durations[idx+3] =
5948                    gCamCapability[cameraId]->picture_min_duration[i];
5949                idx+=4;
5950            }
5951            break;
5952        }
5953    }
5954    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
5955                      &available_min_durations[0], idx);
5956
// High-speed (HFR) video configurations: translate each vendor HFR table
// entry's mode enum into a numeric FPS, then emit the framework tuples.
5957    Vector<int32_t> available_hfr_configs;
5958    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
5959        int32_t fps = 0;
5960        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
5961        case CAM_HFR_MODE_60FPS:
5962            fps = 60;
5963            break;
5964        case CAM_HFR_MODE_90FPS:
5965            fps = 90;
5966            break;
5967        case CAM_HFR_MODE_120FPS:
5968            fps = 120;
5969            break;
5970        case CAM_HFR_MODE_150FPS:
5971            fps = 150;
5972            break;
5973        case CAM_HFR_MODE_180FPS:
5974            fps = 180;
5975            break;
5976        case CAM_HFR_MODE_210FPS:
5977            fps = 210;
5978            break;
5979        case CAM_HFR_MODE_240FPS:
5980            fps = 240;
5981            break;
5982        case CAM_HFR_MODE_480FPS:
5983            fps = 480;
5984            break;
// Unknown/OFF modes leave fps at 0 and are filtered by the check below.
5985        case CAM_HFR_MODE_OFF:
5986        case CAM_HFR_MODE_MAX:
5987        default:
5988            break;
5989        }
5990
5991        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
5992        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
5993            /* For each HFR frame rate, need to advertise one variable fps range
5994             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
5995             * [120, 120]. While camcorder preview alone is running [30, 120] is
5996             * set by the app. When video recording is started, [120, 120] is
5997             * set. This way sensor configuration does not change when recording
5998             * is started */
5999
6000            /* (width, height, fps_min, fps_max, batch_size_max) */
6001            available_hfr_configs.add(
6002                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6003            available_hfr_configs.add(
6004                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6005            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
6006            available_hfr_configs.add(fps);
6007            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6008
6009            /* (width, height, fps_min, fps_max, batch_size_max) */
6010            available_hfr_configs.add(
6011                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
6012            available_hfr_configs.add(
6013                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
6014            available_hfr_configs.add(fps);
6015            available_hfr_configs.add(fps);
6016            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
6017       }
6018    }
6019    //Advertise HFR capability only if the property is set
6020    memset(prop, 0, sizeof(prop));
6021    property_get("persist.camera.hal3hfr.enable", prop, "1");
6022    uint8_t hfrEnable = (uint8_t)atoi(prop);
6023
// array() is used as an emptiness check here — presumably it returns
// NULL for an empty Vector; verify against the Vector implementation.
6024    if(hfrEnable && available_hfr_configs.array()) {
6025        staticInfo.update(
6026                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
6027                available_hfr_configs.array(), available_hfr_configs.size());
6028    }
6029
6030    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
6031    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
6032                      &max_jpeg_size, 1);
6033
// Map vendor effect enums to framework values; entries with no framework
// equivalent are silently dropped (size counts only mapped entries).
6034    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
6035    size_t size = 0;
6036    count = CAM_EFFECT_MODE_MAX;
6037    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
6038    for (size_t i = 0; i < count; i++) {
6039        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6040                gCamCapability[cameraId]->supported_effects[i]);
6041        if (NAME_NOT_FOUND != val) {
6042            avail_effects[size] = (uint8_t)val;
6043            size++;
6044        }
6045    }
6046    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
6047                      avail_effects,
6048                      size);
6049
// Scene modes: skip CAM_SCENE_MODE_OFF, map the rest to framework values.
// supported_indexes remembers the vendor-table index of each mapped mode
// so makeOverridesList can pull the matching override entries.
6050    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
6051    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
6052    size_t supported_scene_modes_cnt = 0;
6053    count = CAM_SCENE_MODE_MAX;
6054    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
6055    for (size_t i = 0; i < count; i++) {
6056        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
6057                CAM_SCENE_MODE_OFF) {
6058            int val = lookupFwkName(SCENE_MODES_MAP,
6059                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
6060                    gCamCapability[cameraId]->supported_scene_modes[i]);
6061            if (NAME_NOT_FOUND != val) {
6062                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
6063                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
6064                supported_scene_modes_cnt++;
6065            }
6066        }
6067    }
6068    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6069                      avail_scene_modes,
6070                      supported_scene_modes_cnt);
6071
// Each scene mode carries 3 override values (AE/AWB/AF per the tag spec).
6072    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
6073    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
6074                      supported_scene_modes_cnt,
6075                      CAM_SCENE_MODE_MAX,
6076                      scene_mode_overrides,
6077                      supported_indexes,
6078                      cameraId);
6079
// NOTE(review): when no scene mode was mapped, cnt is bumped to 1
// (SCENE_MODE_DISABLED) *after* makeOverridesList ran with cnt == 0, so
// the overrides update below publishes 3 values that makeOverridesList
// never filled for that entry — confirm makeOverridesList/defaults cover
// this, or the fallback should precede the list construction.
6080    if (supported_scene_modes_cnt == 0) {
6081        supported_scene_modes_cnt = 1;
6082        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
6083    }
6084
6085    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
6086            scene_mode_overrides, supported_scene_modes_cnt * 3);
6087
6088    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
6089                                         ANDROID_CONTROL_MODE_AUTO,
6090                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
6091    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
6092            available_control_modes,
6093            3);
6094
// Antibanding modes: vendor enum -> framework value, unmapped dropped.
6095    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
6096    size = 0;
6097    count = CAM_ANTIBANDING_MODE_MAX;
6098    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
6099    for (size_t i = 0; i < count; i++) {
6100        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6101                gCamCapability[cameraId]->supported_antibandings[i]);
6102        if (NAME_NOT_FOUND != val) {
6103            avail_antibanding_modes[size] = (uint8_t)val;
6104            size++;
6105        }
6106
6107    }
6108    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6109                      avail_antibanding_modes,
6110                      size);
6111
// Chromatic aberration correction modes. An empty vendor table degrades
// to advertising OFF only; an unmapped entry logs and stops the scan.
6112    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
6113    size = 0;
6114    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
6115    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
6116    if (0 == count) {
6117        avail_abberation_modes[0] =
6118                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6119        size++;
6120    } else {
6121        for (size_t i = 0; i < count; i++) {
6122            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6123                    gCamCapability[cameraId]->aberration_modes[i]);
6124            if (NAME_NOT_FOUND != val) {
6125                avail_abberation_modes[size] = (uint8_t)val;
6126                size++;
6127            } else {
6128                ALOGE("%s: Invalid CAC mode %d", __func__,
6129                        gCamCapability[cameraId]->aberration_modes[i]);
6130                break;
6131            }
6132        }
6133
6134    }
6135    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6136            avail_abberation_modes,
6137            size);
6138
// AF modes: vendor enum -> framework value, unmapped dropped.
6139    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
6140    size = 0;
6141    count = CAM_FOCUS_MODE_MAX;
6142    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
6143    for (size_t i = 0; i < count; i++) {
6144        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6145                gCamCapability[cameraId]->supported_focus_modes[i]);
6146        if (NAME_NOT_FOUND != val) {
6147            avail_af_modes[size] = (uint8_t)val;
6148            size++;
6149        }
6150    }
6151    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
6152                      avail_af_modes,
6153                      size);
6154
// AWB modes: same translation pattern.
6155    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
6156    size = 0;
6157    count = CAM_WB_MODE_MAX;
6158    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
6159    for (size_t i = 0; i < count; i++) {
6160        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6161                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6162                gCamCapability[cameraId]->supported_white_balances[i]);
6163        if (NAME_NOT_FOUND != val) {
6164            avail_awb_modes[size] = (uint8_t)val;
6165            size++;
6166        }
6167    }
6168    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
6169                      avail_awb_modes,
6170                      size);
6171
// Flash firing power levels copied directly (no framework translation).
6172    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
6173    count = CAM_FLASH_FIRING_LEVEL_MAX;
6174    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
6175            count);
6176    for (size_t i = 0; i < count; i++) {
6177        available_flash_levels[i] =
6178                gCamCapability[cameraId]->supported_firing_levels[i];
6179    }
6180    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
6181            available_flash_levels, count);
6182
6183    uint8_t flashAvailable;
6184    if (gCamCapability[cameraId]->flash_available)
6185        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
6186    else
6187        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
6188    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
6189            &flashAvailable, 1);
6190
// AE modes from the vendor table, plus the flash-assisted AE modes when
// a flash unit exists. (Relies on AVAILABLE_TRUE being non-zero.)
6191    Vector<uint8_t> avail_ae_modes;
6192    count = CAM_AE_MODE_MAX;
6193    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
6194    for (size_t i = 0; i < count; i++) {
6195        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
6196    }
6197    if (flashAvailable) {
6198        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
6199        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
6200    }
6201    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
6202                      avail_ae_modes.array(),
6203                      avail_ae_modes.size());
6204
// Sensitivity (ISO) range and max analog sensitivity.
6205    int32_t sensitivity_range[2];
6206    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
6207    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
6208    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
6209                      sensitivity_range,
6210                      sizeof(sensitivity_range) / sizeof(int32_t));
6211
6212    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6213                      &gCamCapability[cameraId]->max_analog_sensitivity,
6214                      1);
6215
6216    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
6217    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
6218                      &sensor_orientation,
6219                      1);
6220
// Max simultaneous output streams, in the tag's (raw, processed-nonstall,
// stalling) ordering — presumably matching the MAX_* constants' intent;
// verify the element order against the tag spec.
6221    int32_t max_output_streams[] = {
6222            MAX_STALLING_STREAMS,
6223            MAX_PROCESSED_STREAMS,
6224            MAX_RAW_STREAMS};
6225    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
6226            max_output_streams,
6227            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
6228
// Count 0 deliberately publishes an empty LED list (no camera LEDs).
6229    uint8_t avail_leds = 0;
6230    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
6231                      &avail_leds, 0);
6232
// Focus distance calibration is only published when the vendor value has
// a framework mapping.
6233    uint8_t focus_dist_calibrated;
6234    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
6235            gCamCapability[cameraId]->focus_dist_calibrated);
6236    if (NAME_NOT_FOUND != val) {
6237        focus_dist_calibrated = (uint8_t)val;
6238        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6239                     &focus_dist_calibrated, 1);
6240    }
6241
6242    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
6243    size = 0;
6244    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
6245            MAX_TEST_PATTERN_CNT);
6246    for (size_t i = 0; i < count; i++) {
6247        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
6248                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
6249        if (NAME_NOT_FOUND != testpatternMode) {
6250            avail_testpattern_modes[size] = testpatternMode;
6251            size++;
6252        }
6253    }
6254    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6255                      avail_testpattern_modes,
6256                      size);
6257
// Pipeline depth = in-flight requests plus fixed pipeline/skip delays.
6258    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
6259    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
6260                      &max_pipeline_depth,
6261                      1);
6262
6263    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
6264    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6265                      &partial_result_count,
6266                       1);
6267
6268    int32_t max_stall_duration = MAX_REPROCESS_STALL;
6269    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
6270
// Advertised request capabilities. HIGH_SPEED_VIDEO is appended only if
// HFR was both enabled by property and populated above; RAW only for
// non-YUV sensors.
6271    Vector<uint8_t> available_capabilities;
6272    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
6273    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
6274    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
6275    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
6276    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
6277    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
6278    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
6279    if (hfrEnable && available_hfr_configs.array()) {
6280        available_capabilities.add(
6281                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
6282    }
6283
6284    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6285        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
6286    }
6287    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6288            available_capabilities.array(),
6289            available_capabilities.size());
6290
6291    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
6292    //BURST_CAPTURE.
// NOTE(review): despite the comment above, the condition keys on the
// sensor type being RAW, not on the capability list (MANUAL_SENSOR /
// BURST_CAPTURE are added unconditionally above) — confirm intent.
6293    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6294            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
6295
6296    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6297            &aeLockAvailable, 1);
6298
6299    //awbLockAvailable to be set to true if capabilities has
6300    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
// NOTE(review): same sensor-type-vs-capability mismatch as aeLockAvailable.
6301    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
6302            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
6303
6304    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6305            &awbLockAvailable, 1);
6306
// One reprocess input stream is supported.
6307    int32_t max_input_streams = 1;
6308    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6309                      &max_input_streams,
6310                      1);
6311
6312    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
// Both IMPLEMENTATION_DEFINED and YCbCr_420_888 inputs can be reprocessed
// to BLOB or YCbCr_420_888 outputs.
6313    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
6314            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
6315            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
6316            HAL_PIXEL_FORMAT_YCbCr_420_888};
6317    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6318                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
6319
// Sync latency follows the limited/full decision made at the top of this
// section: per-frame control only for full devices.
6320    int32_t max_latency = (limitedDevice) ?
6321            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
6322    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
6323                      &max_latency,
6324                      1);
6325
6326    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
6327                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
6328    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6329            available_hot_pixel_modes,
6330            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
6331
6332    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
6333                                         ANDROID_SHADING_MODE_FAST,
6334                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
6335    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
6336                      available_shading_modes,
6337                      3);
6338
6339    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
6340                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
6341    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6342                      available_lens_shading_map_modes,
6343                      2);
6344
6345    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
6346                                      ANDROID_EDGE_MODE_FAST,
6347                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
6348                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
6349    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6350            available_edge_modes,
6351            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
6352
6352
6353    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
6354                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
6355                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
6356                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
6357                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
6358    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6359            available_noise_red_modes,
6360            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
6361
6362    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
6363                                         ANDROID_TONEMAP_MODE_FAST,
6364                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
6365    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6366            available_tonemap_modes,
6367            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
6368
6369    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
6370    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6371            available_hot_pixel_map_modes,
6372            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
6373
6374    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6375            gCamCapability[cameraId]->reference_illuminant1);
6376    if (NAME_NOT_FOUND != val) {
6377        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6378        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
6379    }
6380
6381    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
6382            gCamCapability[cameraId]->reference_illuminant2);
6383    if (NAME_NOT_FOUND != val) {
6384        uint8_t fwkReferenceIlluminant = (uint8_t)val;
6385        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
6386    }
6387
6388    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
6389            (void *)gCamCapability[cameraId]->forward_matrix1,
6390            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6391
6392    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
6393            (void *)gCamCapability[cameraId]->forward_matrix2,
6394            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
6395
6396    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
6397            (void *)gCamCapability[cameraId]->color_transform1,
6398            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6399
6400    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
6401            (void *)gCamCapability[cameraId]->color_transform2,
6402            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
6403
6404    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
6405            (void *)gCamCapability[cameraId]->calibration_transform1,
6406            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6407
6408    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
6409            (void *)gCamCapability[cameraId]->calibration_transform2,
6410            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6411
6412    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
6413       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
6414       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
6415       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
6416       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
6417       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6418       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
6419       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
6420       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
6421       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
6422       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
6423       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
6424       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6425       ANDROID_JPEG_GPS_COORDINATES,
6426       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
6427       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
6428       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
6429       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6430       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
6431       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
6432       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
6433       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
6434       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
6435       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
6436       ANDROID_STATISTICS_FACE_DETECT_MODE,
6437       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6438       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
6439       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6440       ANDROID_BLACK_LEVEL_LOCK };
6441
6442    size_t request_keys_cnt =
6443            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
6444    Vector<int32_t> available_request_keys;
6445    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
6446    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6447        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
6448    }
6449
6450    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
6451            available_request_keys.array(), available_request_keys.size());
6452
6453    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
6454       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
6455       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
6456       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
6457       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
6458       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
6459       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
6460       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
6461       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
6462       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
6463       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
6464       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
6465       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
6466       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
6467       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6468       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
6469       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
6470       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
6471       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
6472       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6473       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
6474       ANDROID_STATISTICS_FACE_SCORES};
6475    size_t result_keys_cnt =
6476            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
6477
6478    Vector<int32_t> available_result_keys;
6479    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
6480    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
6481        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
6482    }
6483    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
6484       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
6485       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
6486    }
6487    if (supportedFaceDetectMode == 1) {
6488        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
6489        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
6490    } else if ((supportedFaceDetectMode == 2) ||
6491            (supportedFaceDetectMode == 3)) {
6492        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
6493        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
6494    }
6495    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6496            available_result_keys.array(), available_result_keys.size());
6497
6498    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
6499       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6500       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
6501       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
6502       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
6503       ANDROID_SCALER_CROPPING_TYPE,
6504       ANDROID_SYNC_MAX_LATENCY,
6505       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6506       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
6507       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6508       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
6509       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
6510       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6511       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6512       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6513       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6514       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6515       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6516       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
6517       ANDROID_LENS_FACING,
6518       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
6519       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
6520       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
6521       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
6522       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6523       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
6524       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
6525       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
6526       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
6527       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
6528       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
6529       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
6530       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6531       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6532       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6533       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6534       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
6535       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
6536       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
6537       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6538       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6539       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6540       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6541       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
6542       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
6543       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
6544       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
6545       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
6546       ANDROID_TONEMAP_MAX_CURVE_POINTS,
6547       ANDROID_CONTROL_AVAILABLE_MODES,
6548       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
6549       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
6550       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
6551       ANDROID_SHADING_AVAILABLE_MODES,
6552       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
6553    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
6554                      available_characteristics_keys,
6555                      sizeof(available_characteristics_keys)/sizeof(int32_t));
6556
6557    /*available stall durations depend on the hw + sw and will be different for different devices */
6558    /*have to add for raw after implementation*/
6559    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6560    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6561
6562    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6563    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6564            MAX_SIZES_CNT);
6565    size_t available_stall_size = count * 4;
6566    int64_t available_stall_durations[available_stall_size];
6567    idx = 0;
6568    for (uint32_t j = 0; j < stall_formats_count; j++) {
6569       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6570          for (uint32_t i = 0; i < count; i++) {
6571             available_stall_durations[idx]   = stall_formats[j];
6572             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6573             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6574             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6575             idx+=4;
6576          }
6577       } else {
6578          for (uint32_t i = 0; i < raw_count; i++) {
6579             available_stall_durations[idx]   = stall_formats[j];
6580             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6581             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6582             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6583             idx+=4;
6584          }
6585       }
6586    }
6587    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6588                      available_stall_durations,
6589                      idx);
6590    //QCAMERA3_OPAQUE_RAW
6591    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6592    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6593    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6594    case LEGACY_RAW:
6595        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6596            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6597        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6598            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6599        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6600            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6601        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6602        break;
6603    case MIPI_RAW:
6604        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6605            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6606        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6607            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6608        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6609            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6610        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6611        break;
6612    default:
6613        ALOGE("%s: unknown opaque_raw_format %d", __func__,
6614                gCamCapability[cameraId]->opaque_raw_fmt);
6615        break;
6616    }
6617    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6618
6619    int32_t strides[3*raw_count];
6620    for (size_t i = 0; i < raw_count; i++) {
6621        cam_stream_buf_plane_info_t buf_planes;
6622        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6623        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6624        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6625            &gCamCapability[cameraId]->padding_info, &buf_planes);
6626        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6627    }
6628    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6629            3*raw_count);
6630
6631    gStaticMetadata[cameraId] = staticInfo.release();
6632    return rc;
6633}
6634
6635/*===========================================================================
6636 * FUNCTION   : makeTable
6637 *
6638 * DESCRIPTION: make a table of sizes
6639 *
6640 * PARAMETERS :
6641 *
6642 *
6643 *==========================================================================*/
6644void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
6645        size_t max_size, int32_t *sizeTable)
6646{
6647    size_t j = 0;
6648    if (size > max_size) {
6649       size = max_size;
6650    }
6651    for (size_t i = 0; i < size; i++) {
6652        sizeTable[j] = dimTable[i].width;
6653        sizeTable[j+1] = dimTable[i].height;
6654        j+=2;
6655    }
6656}
6657
6658/*===========================================================================
6659 * FUNCTION   : makeFPSTable
6660 *
6661 * DESCRIPTION: make a table of fps ranges
6662 *
6663 * PARAMETERS :
6664 *
6665 *==========================================================================*/
6666void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
6667        size_t max_size, int32_t *fpsRangesTable)
6668{
6669    size_t j = 0;
6670    if (size > max_size) {
6671       size = max_size;
6672    }
6673    for (size_t i = 0; i < size; i++) {
6674        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
6675        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
6676        j+=2;
6677    }
6678}
6679
6680/*===========================================================================
6681 * FUNCTION   : makeOverridesList
6682 *
6683 * DESCRIPTION: make a list of scene mode overrides
6684 *
6685 * PARAMETERS :
6686 *
6687 *
6688 *==========================================================================*/
6689void QCamera3HardwareInterface::makeOverridesList(
6690        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
6691        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
6692{
6693    /*daemon will give a list of overrides for all scene modes.
6694      However we should send the fwk only the overrides for the scene modes
6695      supported by the framework*/
6696    size_t j = 0;
6697    if (size > max_size) {
6698       size = max_size;
6699    }
6700    size_t focus_count = CAM_FOCUS_MODE_MAX;
6701    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
6702            focus_count);
6703    for (size_t i = 0; i < size; i++) {
6704        bool supt = false;
6705        size_t index = supported_indexes[i];
6706        overridesList[j] = gCamCapability[camera_id]->flash_available ?
6707                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
6708        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
6709                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
6710                overridesTable[index].awb_mode);
6711        if (NAME_NOT_FOUND != val) {
6712            overridesList[j+1] = (uint8_t)val;
6713        }
6714        uint8_t focus_override = overridesTable[index].af_mode;
6715        for (size_t k = 0; k < focus_count; k++) {
6716           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
6717              supt = true;
6718              break;
6719           }
6720        }
6721        if (supt) {
6722            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
6723                    focus_override);
6724            if (NAME_NOT_FOUND != val) {
6725                overridesList[j+2] = (uint8_t)val;
6726            }
6727        } else {
6728           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
6729        }
6730        j+=3;
6731    }
6732}
6733
6734/*===========================================================================
6735 * FUNCTION   : filterJpegSizes
6736 *
6737 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
6738 *              could be downscaled to
6739 *
6740 * PARAMETERS :
6741 *
6742 * RETURN     : length of jpegSizes array
6743 *==========================================================================*/
6744
6745size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
6746        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
6747        uint8_t downscale_factor)
6748{
6749    if (0 == downscale_factor) {
6750        downscale_factor = 1;
6751    }
6752
6753    int32_t min_width = active_array_size.width / downscale_factor;
6754    int32_t min_height = active_array_size.height / downscale_factor;
6755    size_t jpegSizesCnt = 0;
6756    if (processedSizesCnt > maxCount) {
6757        processedSizesCnt = maxCount;
6758    }
6759    for (size_t i = 0; i < processedSizesCnt; i+=2) {
6760        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
6761            jpegSizes[jpegSizesCnt] = processedSizes[i];
6762            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
6763            jpegSizesCnt += 2;
6764        }
6765    }
6766    return jpegSizesCnt;
6767}
6768
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert a backend format to the HAL pixel format type
 *              recognized by the framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
 *
 *==========================================================================*/
6779int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
6780{
6781    int32_t halPixelFormat;
6782
6783    switch (format) {
6784    case CAM_FORMAT_YUV_420_NV12:
6785        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
6786        break;
6787    case CAM_FORMAT_YUV_420_NV21:
6788        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6789        break;
6790    case CAM_FORMAT_YUV_420_NV21_ADRENO:
6791        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
6792        break;
6793    case CAM_FORMAT_YUV_420_YV12:
6794        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
6795        break;
6796    case CAM_FORMAT_YUV_422_NV16:
6797    case CAM_FORMAT_YUV_422_NV61:
6798    default:
6799        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
6800        break;
6801    }
6802    return halPixelFormat;
6803}
6804
6805/*===========================================================================
6806 * FUNCTION   : computeNoiseModelEntryS
6807 *
6808 * DESCRIPTION: function to map a given sensitivity to the S noise
6809 *              model parameters in the DNG noise model.
6810 *
6811 * PARAMETERS : sens : the sensor sensitivity
6812 *
 * RETURN     : S (sensor amplification) noise
6814 *
6815 *==========================================================================*/
6816double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
6817    double s = gCamCapability[mCameraId]->gradient_S * sens +
6818            gCamCapability[mCameraId]->offset_S;
6819    return ((s < 0.0) ? 0.0 : s);
6820}
6821
6822/*===========================================================================
6823 * FUNCTION   : computeNoiseModelEntryO
6824 *
6825 * DESCRIPTION: function to map a given sensitivity to the O noise
6826 *              model parameters in the DNG noise model.
6827 *
6828 * PARAMETERS : sens : the sensor sensitivity
6829 *
 * RETURN     : O (sensor readout) noise
6831 *
6832 *==========================================================================*/
6833double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
6834    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
6835    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
6836            1.0 : (1.0 * sens / max_analog_sens);
6837    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
6838            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
6839    return ((o < 0.0) ? 0.0 : o);
6840}
6841
6842/*===========================================================================
6843 * FUNCTION   : getSensorSensitivity
6844 *
6845 * DESCRIPTION: convert iso_mode to an integer value
6846 *
6847 * PARAMETERS : iso_mode : the iso_mode supported by sensor
6848 *
 * RETURN     : sensitivity supported by sensor
6850 *
6851 *==========================================================================*/
6852int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
6853{
6854    int32_t sensitivity;
6855
6856    switch (iso_mode) {
6857    case CAM_ISO_MODE_100:
6858        sensitivity = 100;
6859        break;
6860    case CAM_ISO_MODE_200:
6861        sensitivity = 200;
6862        break;
6863    case CAM_ISO_MODE_400:
6864        sensitivity = 400;
6865        break;
6866    case CAM_ISO_MODE_800:
6867        sensitivity = 800;
6868        break;
6869    case CAM_ISO_MODE_1600:
6870        sensitivity = 1600;
6871        break;
6872    default:
6873        sensitivity = -1;
6874        break;
6875    }
6876    return sensitivity;
6877}
6878
6879/*===========================================================================
6880 * FUNCTION   : getCamInfo
6881 *
6882 * DESCRIPTION: query camera capabilities
6883 *
6884 * PARAMETERS :
6885 *   @cameraId  : camera Id
6886 *   @info      : camera info struct to be filled in with camera capabilities
6887 *
6888 * RETURN     : int type of status
6889 *              NO_ERROR  -- success
6890 *              none-zero failure code
6891 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock serializes lazy initialization of the per-camera capability
    // and static-metadata caches shared across HAL instances.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        // First query for this camera: pull capabilities from the backend.
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        // Build the static characteristics once; reused on later queries.
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the backend mount position into the framework facing enum.
    // An unknown position is reported as an error but the remaining fields
    // are still filled in.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // This HAL advertises the camera3 device API version 3.3.
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    // m uses the highest max_fps advertised in the fps-range table.
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
6959
6960/*===========================================================================
6961 * FUNCTION   : translateCapabilityToMetadata
6962 *
6963 * DESCRIPTION: translate the capability into camera_metadata_t
6964 *
6965 * PARAMETERS : type of the request
6966 *
6967 *
6968 * RETURN     : success: camera_metadata_t*
6969 *              failure: NULL
6970 *
6971 *==========================================================================*/
6972camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
6973{
6974    if (mDefaultMetadata[type] != NULL) {
6975        return mDefaultMetadata[type];
6976    }
6977    //first time we are handling this request
6978    //fill up the metadata structure using the wrapper class
6979    CameraMetadata settings;
6980    //translate from cam_capability_t to camera_metadata_tag_t
6981    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
6982    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
6983    int32_t defaultRequestID = 0;
6984    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
6985
6986    /* OIS disable */
6987    char ois_prop[PROPERTY_VALUE_MAX];
6988    memset(ois_prop, 0, sizeof(ois_prop));
6989    property_get("persist.camera.ois.disable", ois_prop, "0");
6990    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
6991
6992    /* Force video to use OIS */
6993    char videoOisProp[PROPERTY_VALUE_MAX];
6994    memset(videoOisProp, 0, sizeof(videoOisProp));
6995    property_get("persist.camera.ois.video", videoOisProp, "1");
6996    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
6997
6998    // EIS enable/disable
6999    char eis_prop[PROPERTY_VALUE_MAX];
7000    memset(eis_prop, 0, sizeof(eis_prop));
7001    property_get("persist.camera.eis.enable", eis_prop, "0");
7002    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7003
7004    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7005    // This is a bit hacky. EIS is enabled only when the above setprop
7006    // is set to non-zero value and on back camera (for 2015 Nexus).
7007    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7008    // configureStream is called before this function. In other words,
7009    // we cannot guarantee the app will call configureStream before
7010    // calling createDefaultRequest.
7011    const bool eisEnabled = facingBack && eis_prop_set;
7012
7013    uint8_t controlIntent = 0;
7014    uint8_t focusMode;
7015    uint8_t vsMode;
7016    uint8_t optStabMode;
7017    uint8_t cacMode;
7018    uint8_t edge_mode;
7019    uint8_t noise_red_mode;
7020    uint8_t tonemap_mode;
7021    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7022    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7023    switch (type) {
7024      case CAMERA3_TEMPLATE_PREVIEW:
7025        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7026        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7027        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7028        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7029        edge_mode = ANDROID_EDGE_MODE_FAST;
7030        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7031        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7032        break;
7033      case CAMERA3_TEMPLATE_STILL_CAPTURE:
7034        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7035        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7036        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7037        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7038        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7039        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7040        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7041        break;
7042      case CAMERA3_TEMPLATE_VIDEO_RECORD:
7043        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7044        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7045        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7046        if (eisEnabled) {
7047            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7048        }
7049        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7050        edge_mode = ANDROID_EDGE_MODE_FAST;
7051        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7052        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7053        if (forceVideoOis)
7054            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7055        break;
7056      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7057        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7058        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7059        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7060        if (eisEnabled) {
7061            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7062        }
7063        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7064        edge_mode = ANDROID_EDGE_MODE_FAST;
7065        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7066        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7067        if (forceVideoOis)
7068            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7069        break;
7070      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7071        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7072        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7073        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7074        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7075        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7076        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7077        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7078        break;
7079      case CAMERA3_TEMPLATE_MANUAL:
7080        edge_mode = ANDROID_EDGE_MODE_FAST;
7081        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7082        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7083        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7084        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7085        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7086        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7087        break;
7088      default:
7089        edge_mode = ANDROID_EDGE_MODE_FAST;
7090        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7091        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7092        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7093        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7094        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7095        break;
7096    }
7097    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7098    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7099    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7100    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7101        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7102    }
7103    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7104
7105    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7106            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7107        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7108    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7109            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7110            || ois_disable)
7111        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7112    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7113
7114    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7115            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7116
7117    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7118    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7119
7120    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7121    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7122
7123    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7124    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7125
7126    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7127    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7128
7129    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7130    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7131
7132    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7133    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7134
7135    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7136    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7137
7138    /*flash*/
7139    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7140    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7141
7142    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7143    settings.update(ANDROID_FLASH_FIRING_POWER,
7144            &flashFiringLevel, 1);
7145
7146    /* lens */
7147    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7148    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7149
7150    if (gCamCapability[mCameraId]->filter_densities_count) {
7151        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7152        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7153                        gCamCapability[mCameraId]->filter_densities_count);
7154    }
7155
7156    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7157    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7158
7159    float default_focus_distance = 0;
7160    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7161
7162    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7163    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7164
7165    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7166    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7167
7168    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7169    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7170
7171    /* face detection (default to OFF) */
7172    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7173    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7174
7175    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7176    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7177
7178    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7179    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7180
7181    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7182    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7183
7184    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7185    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7186
7187    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7188    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7189
7190    /* Exposure time(Update the Min Exposure Time)*/
7191    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7192    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7193
7194    /* frame duration */
7195    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7196    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7197
7198    /* sensitivity */
7199    static const int32_t default_sensitivity = 100;
7200    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7201
7202    /*edge mode*/
7203    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7204
7205    /*noise reduction mode*/
7206    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7207
7208    /*color correction mode*/
7209    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7210    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7211
7212    /*transform matrix mode*/
7213    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7214
7215    int32_t scaler_crop_region[4];
7216    scaler_crop_region[0] = 0;
7217    scaler_crop_region[1] = 0;
7218    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7219    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7220    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7221
7222    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7223    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7224
7225    /*focus distance*/
7226    float focus_distance = 0.0;
7227    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7228
7229    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7230    float max_range = 0.0;
7231    float max_fixed_fps = 0.0;
7232    int32_t fps_range[2] = {0, 0};
7233    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7234            i++) {
7235        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7236            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7237        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7238                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7239                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7240            if (range > max_range) {
7241                fps_range[0] =
7242                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7243                fps_range[1] =
7244                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7245                max_range = range;
7246            }
7247        } else {
7248            if (range < 0.01 && max_fixed_fps <
7249                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7250                fps_range[0] =
7251                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7252                fps_range[1] =
7253                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7254                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7255            }
7256        }
7257    }
7258    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7259
7260    /*precapture trigger*/
7261    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7262    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7263
7264    /*af trigger*/
7265    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7266    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7267
7268    /* ae & af regions */
7269    int32_t active_region[] = {
7270            gCamCapability[mCameraId]->active_array_size.left,
7271            gCamCapability[mCameraId]->active_array_size.top,
7272            gCamCapability[mCameraId]->active_array_size.left +
7273                    gCamCapability[mCameraId]->active_array_size.width,
7274            gCamCapability[mCameraId]->active_array_size.top +
7275                    gCamCapability[mCameraId]->active_array_size.height,
7276            0};
7277    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7278            sizeof(active_region) / sizeof(active_region[0]));
7279    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7280            sizeof(active_region) / sizeof(active_region[0]));
7281
7282    /* black level lock */
7283    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7284    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7285
7286    /* lens shading map mode */
7287    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7288    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7289        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7290    }
7291    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7292
7293    //special defaults for manual template
7294    if (type == CAMERA3_TEMPLATE_MANUAL) {
7295        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7296        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7297
7298        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7299        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7300
7301        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7302        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7303
7304        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7305        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7306
7307        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7308        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7309
7310        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7311        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7312    }
7313
7314
7315    /* TNR
7316     * We'll use this location to determine which modes TNR will be set.
7317     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7318     * This is not to be confused with linking on a per stream basis that decision
7319     * is still on per-session basis and will be handled as part of config stream
7320     */
7321    uint8_t tnr_enable = 0;
7322
7323    if (m_bTnrPreview || m_bTnrVideo) {
7324
7325        switch (type) {
7326            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7327            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7328                    tnr_enable = 1;
7329                    break;
7330
7331            default:
7332                    tnr_enable = 0;
7333                    break;
7334        }
7335
7336        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7337        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7338        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7339
7340        CDBG("%s: TNR:%d with process plate %d for template:%d",
7341                            __func__, tnr_enable, tnr_process_type, type);
7342    }
7343
7344    /* CDS default */
7345    char prop[PROPERTY_VALUE_MAX];
7346    memset(prop, 0, sizeof(prop));
7347    property_get("persist.camera.CDS", prop, "Auto");
7348    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7349    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7350    if (CAM_CDS_MODE_MAX == cds_mode) {
7351        cds_mode = CAM_CDS_MODE_AUTO;
7352    }
7353    m_CdsPreference = cds_mode;
7354
7355    /* Disabling CDS in templates which have TNR enabled*/
7356    if (tnr_enable)
7357        cds_mode = CAM_CDS_MODE_OFF;
7358
7359    int32_t mode = cds_mode;
7360    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7361    mDefaultMetadata[type] = settings.release();
7362
7363    return mDefaultMetadata[type];
7364}
7365
7366/*===========================================================================
7367 * FUNCTION   : setFrameParameters
7368 *
7369 * DESCRIPTION: set parameters per frame as requested in the metadata from
7370 *              framework
7371 *
7372 * PARAMETERS :
7373 *   @request   : request that needs to be serviced
7374 *   @streamID : Stream ID of all the requested streams
7375 *   @blob_request: Whether this request is a blob request or not
7376 *
7377 * RETURN     : success: NO_ERROR
7378 *              failure:
7379 *==========================================================================*/
7380int QCamera3HardwareInterface::setFrameParameters(
7381                    camera3_capture_request_t *request,
7382                    cam_stream_ID_t streamID,
7383                    int blob_request,
7384                    uint32_t snapshotStreamId)
7385{
7386    /*translate from camera_metadata_t type to parm_type_t*/
7387    int rc = 0;
7388    int32_t hal_version = CAM_HAL_V3;
7389
7390    clear_metadata_buffer(mParameters);
7391    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7392        ALOGE("%s: Failed to set hal version in the parameters", __func__);
7393        return BAD_VALUE;
7394    }
7395
7396    /*we need to update the frame number in the parameters*/
7397    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7398            request->frame_number)) {
7399        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7400        return BAD_VALUE;
7401    }
7402
7403    /* Update stream id of all the requested buffers */
7404    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7405        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7406        return BAD_VALUE;
7407    }
7408
7409    if (mUpdateDebugLevel) {
7410        uint32_t dummyDebugLevel = 0;
7411        /* The value of dummyDebugLevel is irrelavent. On
7412         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7413        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7414                dummyDebugLevel)) {
7415            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7416            return BAD_VALUE;
7417        }
7418        mUpdateDebugLevel = false;
7419    }
7420
7421    if(request->settings != NULL){
7422        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7423        if (blob_request)
7424            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7425    }
7426
7427    return rc;
7428}
7429
7430/*===========================================================================
7431 * FUNCTION   : setReprocParameters
7432 *
7433 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7434 *              return it.
7435 *
7436 * PARAMETERS :
7437 *   @request   : request that needs to be serviced
7438 *
7439 * RETURN     : success: NO_ERROR
7440 *              failure:
7441 *==========================================================================*/
7442int32_t QCamera3HardwareInterface::setReprocParameters(
7443        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7444        uint32_t snapshotStreamId)
7445{
7446    /*translate from camera_metadata_t type to parm_type_t*/
7447    int rc = 0;
7448
7449    if (NULL == request->settings){
7450        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7451        return BAD_VALUE;
7452    }
7453
7454    if (NULL == reprocParam) {
7455        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7456        return BAD_VALUE;
7457    }
7458    clear_metadata_buffer(reprocParam);
7459
7460    /*we need to update the frame number in the parameters*/
7461    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7462            request->frame_number)) {
7463        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7464        return BAD_VALUE;
7465    }
7466
7467    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7468    if (rc < 0) {
7469        ALOGE("%s: Failed to translate reproc request", __func__);
7470        return rc;
7471    }
7472
7473    CameraMetadata frame_settings;
7474    frame_settings = request->settings;
7475    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7476            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7477        int32_t *crop_count =
7478                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7479        int32_t *crop_data =
7480                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7481        int32_t *roi_map =
7482                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7483        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7484            cam_crop_data_t crop_meta;
7485            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7486            crop_meta.num_of_streams = 1;
7487            crop_meta.crop_info[0].crop.left   = crop_data[0];
7488            crop_meta.crop_info[0].crop.top    = crop_data[1];
7489            crop_meta.crop_info[0].crop.width  = crop_data[2];
7490            crop_meta.crop_info[0].crop.height = crop_data[3];
7491
7492            crop_meta.crop_info[0].roi_map.left =
7493                    roi_map[0];
7494            crop_meta.crop_info[0].roi_map.top =
7495                    roi_map[1];
7496            crop_meta.crop_info[0].roi_map.width =
7497                    roi_map[2];
7498            crop_meta.crop_info[0].roi_map.height =
7499                    roi_map[3];
7500
7501            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7502                rc = BAD_VALUE;
7503            }
7504            CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7505                    __func__,
7506                    request->input_buffer->stream,
7507                    crop_meta.crop_info[0].crop.left,
7508                    crop_meta.crop_info[0].crop.top,
7509                    crop_meta.crop_info[0].crop.width,
7510                    crop_meta.crop_info[0].crop.height);
7511            CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7512                    __func__,
7513                    request->input_buffer->stream,
7514                    crop_meta.crop_info[0].roi_map.left,
7515                    crop_meta.crop_info[0].roi_map.top,
7516                    crop_meta.crop_info[0].roi_map.width,
7517                    crop_meta.crop_info[0].roi_map.height);
7518            } else {
7519                ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7520            }
7521    } else {
7522        ALOGE("%s: No crop data from matching output stream", __func__);
7523    }
7524
7525    /* These settings are not needed for regular requests so handle them specially for
7526       reprocess requests; information needed for EXIF tags */
7527    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7528        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7529                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7530        if (NAME_NOT_FOUND != val) {
7531            uint32_t flashMode = (uint32_t)val;
7532            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7533                rc = BAD_VALUE;
7534            }
7535        } else {
7536            ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7537                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7538        }
7539    } else {
7540        CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7541    }
7542
7543    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7544        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7545        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7546            rc = BAD_VALUE;
7547        }
7548    } else {
7549        CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7550    }
7551
7552    return rc;
7553}
7554
7555/*===========================================================================
7556 * FUNCTION   : saveRequestSettings
7557 *
7558 * DESCRIPTION: Add any settings that might have changed to the request settings
7559 *              and save the settings to be applied on the frame
7560 *
7561 * PARAMETERS :
7562 *   @jpegMetadata : the extracted and/or modified jpeg metadata
7563 *   @request      : request with initial settings
7564 *
7565 * RETURN     :
7566 * camera_metadata_t* : pointer to the saved request settings
7567 *==========================================================================*/
7568camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7569        const CameraMetadata &jpegMetadata,
7570        camera3_capture_request_t *request)
7571{
7572    camera_metadata_t *resultMetadata;
7573    CameraMetadata camMetadata;
7574    camMetadata = request->settings;
7575
7576    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7577        int32_t thumbnail_size[2];
7578        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7579        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7580        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7581                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7582    }
7583
7584    resultMetadata = camMetadata.release();
7585    return resultMetadata;
7586}
7587
7588/*===========================================================================
7589 * FUNCTION   : setHalFpsRange
7590 *
7591 * DESCRIPTION: set FPS range parameter
7592 *
7593 *
7594 * PARAMETERS :
7595 *   @settings    : Metadata from framework
7596 *   @hal_metadata: Metadata buffer
7597 *
7598 *
7599 * RETURN     : success: NO_ERROR
7600 *              failure:
7601 *==========================================================================*/
7602int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
7603        metadata_buffer_t *hal_metadata)
7604{
7605    int32_t rc = NO_ERROR;
7606    cam_fps_range_t fps_range;
7607    fps_range.min_fps = (float)
7608            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
7609    fps_range.max_fps = (float)
7610            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
7611    fps_range.video_min_fps = fps_range.min_fps;
7612    fps_range.video_max_fps = fps_range.max_fps;
7613
7614    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
7615            fps_range.min_fps, fps_range.max_fps);
7616    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
7617     * follows:
7618     * ---------------------------------------------------------------|
7619     *      Video stream is absent in configure_streams               |
7620     *    (Camcorder preview before the first video record            |
7621     * ---------------------------------------------------------------|
7622     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
7623     *                   |             |             | vid_min/max_fps|
7624     * ---------------------------------------------------------------|
7625     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
7626     *                   |-------------|-------------|----------------|
7627     *                   |  [240, 240] |     240     |  [240, 240]    |
7628     * ---------------------------------------------------------------|
7629     *     Video stream is present in configure_streams               |
7630     * ---------------------------------------------------------------|
7631     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
7632     *                   |             |             | vid_min/max_fps|
7633     * ---------------------------------------------------------------|
7634     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
7635     * (camcorder prev   |-------------|-------------|----------------|
7636     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
7637     *  is stopped)      |             |             |                |
7638     * ---------------------------------------------------------------|
7639     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
7640     *                   |-------------|-------------|----------------|
7641     *                   |  [240, 240] |     240     |  [240, 240]    |
7642     * ---------------------------------------------------------------|
7643     * When Video stream is absent in configure_streams,
7644     * preview fps = sensor_fps / batchsize
7645     * Eg: for 240fps at batchSize 4, preview = 60fps
7646     *     for 120fps at batchSize 4, preview = 30fps
7647     *
7648     * When video stream is present in configure_streams, preview fps is as per
7649     * the ratio of preview buffers to video buffers requested in process
7650     * capture request
7651     */
7652    mBatchSize = 0;
7653    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
7654        fps_range.min_fps = fps_range.video_max_fps;
7655        fps_range.video_min_fps = fps_range.video_max_fps;
7656        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
7657                fps_range.max_fps);
7658        if (NAME_NOT_FOUND != val) {
7659            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
7660            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
7661                return BAD_VALUE;
7662            }
7663
7664            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
7665                /* If batchmode is currently in progress and the fps changes,
7666                 * set the flag to restart the sensor */
7667                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
7668                        (mHFRVideoFps != fps_range.max_fps)) {
7669                    mNeedSensorRestart = true;
7670                }
7671                mHFRVideoFps = fps_range.max_fps;
7672                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
7673                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
7674                    mBatchSize = MAX_HFR_BATCH_SIZE;
7675                }
7676             }
7677            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);
7678
7679         }
7680    } else {
7681        /* HFR mode is session param in backend/ISP. This should be reset when
7682         * in non-HFR mode  */
7683        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
7684        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
7685            return BAD_VALUE;
7686        }
7687    }
7688    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
7689        return BAD_VALUE;
7690    }
7691    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
7692            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
7693    return rc;
7694}
7695
7696/*===========================================================================
7697 * FUNCTION   : translateToHalMetadata
7698 *
7699 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
7700 *
7701 *
7702 * PARAMETERS :
7703 *   @request  : request sent from framework
7704 *
7705 *
7706 * RETURN     : success: NO_ERROR
7707 *              failure:
7708 *==========================================================================*/
7709int QCamera3HardwareInterface::translateToHalMetadata
7710                                  (const camera3_capture_request_t *request,
7711                                   metadata_buffer_t *hal_metadata,
7712                                   uint32_t snapshotStreamId)
7713{
7714    int rc = 0;
7715    CameraMetadata frame_settings;
7716    frame_settings = request->settings;
7717
7718    /* Do not change the order of the following list unless you know what you are
7719     * doing.
7720     * The order is laid out in such a way that parameters in the front of the table
7721     * may be used to override the parameters later in the table. Examples are:
7722     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
7724     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
7725     * 4. Any mode should precede it's corresponding settings
7726     */
7727    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
7728        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
7729        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
7730            rc = BAD_VALUE;
7731        }
7732        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
7733        if (rc != NO_ERROR) {
7734            ALOGE("%s: extractSceneMode failed", __func__);
7735        }
7736    }
7737
7738    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7739        uint8_t fwk_aeMode =
7740            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7741        uint8_t aeMode;
7742        int32_t redeye;
7743
7744        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
7745            aeMode = CAM_AE_MODE_OFF;
7746        } else {
7747            aeMode = CAM_AE_MODE_ON;
7748        }
7749        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
7750            redeye = 1;
7751        } else {
7752            redeye = 0;
7753        }
7754
7755        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7756                fwk_aeMode);
7757        if (NAME_NOT_FOUND != val) {
7758            int32_t flashMode = (int32_t)val;
7759            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
7760        }
7761
7762        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
7763        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
7764            rc = BAD_VALUE;
7765        }
7766    }
7767
7768    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
7769        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
7770        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7771                fwk_whiteLevel);
7772        if (NAME_NOT_FOUND != val) {
7773            uint8_t whiteLevel = (uint8_t)val;
7774            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
7775                rc = BAD_VALUE;
7776            }
7777        }
7778    }
7779
7780    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
7781        uint8_t fwk_cacMode =
7782                frame_settings.find(
7783                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
7784        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7785                fwk_cacMode);
7786        if (NAME_NOT_FOUND != val) {
7787            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
7788            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
7789                rc = BAD_VALUE;
7790            }
7791        } else {
7792            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
7793        }
7794    }
7795
7796    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
7797        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
7798        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7799                fwk_focusMode);
7800        if (NAME_NOT_FOUND != val) {
7801            uint8_t focusMode = (uint8_t)val;
7802            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
7803                rc = BAD_VALUE;
7804            }
7805        }
7806    }
7807
7808    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
7809        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
7810        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
7811                focalDistance)) {
7812            rc = BAD_VALUE;
7813        }
7814    }
7815
7816    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
7817        uint8_t fwk_antibandingMode =
7818                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
7819        int val = lookupHalName(ANTIBANDING_MODES_MAP,
7820                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
7821        if (NAME_NOT_FOUND != val) {
7822            uint32_t hal_antibandingMode = (uint32_t)val;
7823            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
7824                    hal_antibandingMode)) {
7825                rc = BAD_VALUE;
7826            }
7827        }
7828    }
7829
7830    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
7831        int32_t expCompensation = frame_settings.find(
7832                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
7833        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
7834            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
7835        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
7836            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
7837        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
7838                expCompensation)) {
7839            rc = BAD_VALUE;
7840        }
7841    }
7842
7843    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
7844        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
7845        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
7846            rc = BAD_VALUE;
7847        }
7848    }
7849    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
7850        rc = setHalFpsRange(frame_settings, hal_metadata);
7851        if (rc != NO_ERROR) {
7852            ALOGE("%s: setHalFpsRange failed", __func__);
7853        }
7854    }
7855
7856    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
7857        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
7858        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
7859            rc = BAD_VALUE;
7860        }
7861    }
7862
7863    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
7864        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
7865        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7866                fwk_effectMode);
7867        if (NAME_NOT_FOUND != val) {
7868            uint8_t effectMode = (uint8_t)val;
7869            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
7870                rc = BAD_VALUE;
7871            }
7872        }
7873    }
7874
7875    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
7876        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
7877        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
7878                colorCorrectMode)) {
7879            rc = BAD_VALUE;
7880        }
7881    }
7882
7883    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
7884        cam_color_correct_gains_t colorCorrectGains;
7885        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
7886            colorCorrectGains.gains[i] =
7887                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
7888        }
7889        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
7890                colorCorrectGains)) {
7891            rc = BAD_VALUE;
7892        }
7893    }
7894
7895    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
7896        cam_color_correct_matrix_t colorCorrectTransform;
7897        cam_rational_type_t transform_elem;
7898        size_t num = 0;
7899        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
7900           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
7901              transform_elem.numerator =
7902                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
7903              transform_elem.denominator =
7904                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
7905              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
7906              num++;
7907           }
7908        }
7909        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
7910                colorCorrectTransform)) {
7911            rc = BAD_VALUE;
7912        }
7913    }
7914
7915    cam_trigger_t aecTrigger;
7916    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
7917    aecTrigger.trigger_id = -1;
7918    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
7919        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
7920        aecTrigger.trigger =
7921            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
7922        aecTrigger.trigger_id =
7923            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
7924        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
7925                aecTrigger)) {
7926            rc = BAD_VALUE;
7927        }
7928        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
7929                aecTrigger.trigger, aecTrigger.trigger_id);
7930    }
7931
7932    /*af_trigger must come with a trigger id*/
7933    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
7934        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
7935        cam_trigger_t af_trigger;
7936        af_trigger.trigger =
7937            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
7938        af_trigger.trigger_id =
7939            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
7940        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
7941            rc = BAD_VALUE;
7942        }
7943        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
7944                af_trigger.trigger, af_trigger.trigger_id);
7945    }
7946
7947    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
7948        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
7949        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
7950            rc = BAD_VALUE;
7951        }
7952    }
7953    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
7954        cam_edge_application_t edge_application;
7955        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
7956        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
7957            edge_application.sharpness = 0;
7958        } else {
7959            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
7960        }
7961        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
7962            rc = BAD_VALUE;
7963        }
7964    }
7965
7966    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7967        int32_t respectFlashMode = 1;
7968        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
7969            uint8_t fwk_aeMode =
7970                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
7971            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
7972                respectFlashMode = 0;
7973                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
7974                    __func__);
7975            }
7976        }
7977        if (respectFlashMode) {
7978            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7979                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7980            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
7981            // To check: CAM_INTF_META_FLASH_MODE usage
7982            if (NAME_NOT_FOUND != val) {
7983                uint8_t flashMode = (uint8_t)val;
7984                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
7985                    rc = BAD_VALUE;
7986                }
7987            }
7988        }
7989    }
7990
7991    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
7992        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
7993        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
7994            rc = BAD_VALUE;
7995        }
7996    }
7997
7998    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
7999        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8000        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8001                flashFiringTime)) {
8002            rc = BAD_VALUE;
8003        }
8004    }
8005
8006    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8007        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8008        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8009                hotPixelMode)) {
8010            rc = BAD_VALUE;
8011        }
8012    }
8013
8014    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8015        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8016        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8017                lensAperture)) {
8018            rc = BAD_VALUE;
8019        }
8020    }
8021
8022    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8023        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8024        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8025                filterDensity)) {
8026            rc = BAD_VALUE;
8027        }
8028    }
8029
8030    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8031        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8032        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8033                focalLength)) {
8034            rc = BAD_VALUE;
8035        }
8036    }
8037
8038    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8039        uint8_t optStabMode =
8040                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8041        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8042                optStabMode)) {
8043            rc = BAD_VALUE;
8044        }
8045    }
8046
8047    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8048        uint8_t videoStabMode =
8049                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8050        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8051                videoStabMode)) {
8052            rc = BAD_VALUE;
8053        }
8054    }
8055
8056
8057    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8058        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8059        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8060                noiseRedMode)) {
8061            rc = BAD_VALUE;
8062        }
8063    }
8064
8065    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8066        float reprocessEffectiveExposureFactor =
8067            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8068        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8069                reprocessEffectiveExposureFactor)) {
8070            rc = BAD_VALUE;
8071        }
8072    }
8073
8074    cam_crop_region_t scalerCropRegion;
8075    bool scalerCropSet = false;
8076    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8077        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8078        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8079        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8080        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8081
8082        // Map coordinate system from active array to sensor output.
8083        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8084                scalerCropRegion.width, scalerCropRegion.height);
8085
8086        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8087                scalerCropRegion)) {
8088            rc = BAD_VALUE;
8089        }
8090        scalerCropSet = true;
8091    }
8092
8093    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8094        int64_t sensorExpTime =
8095                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8096        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
8097        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8098                sensorExpTime)) {
8099            rc = BAD_VALUE;
8100        }
8101    }
8102
8103    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8104        int64_t sensorFrameDuration =
8105                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8106        int64_t minFrameDuration = getMinFrameDuration(request);
8107        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8108        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8109            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8110        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
8111        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8112                sensorFrameDuration)) {
8113            rc = BAD_VALUE;
8114        }
8115    }
8116
8117    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8118        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8119        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8120                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8121        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8122                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8123        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
8124        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8125                sensorSensitivity)) {
8126            rc = BAD_VALUE;
8127        }
8128    }
8129
8130    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8131        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8132        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8133            rc = BAD_VALUE;
8134        }
8135    }
8136
8137    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8138        uint8_t fwk_facedetectMode =
8139                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8140
8141        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8142                fwk_facedetectMode);
8143
8144        if (NAME_NOT_FOUND != val) {
8145            uint8_t facedetectMode = (uint8_t)val;
8146            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8147                    facedetectMode)) {
8148                rc = BAD_VALUE;
8149            }
8150        }
8151    }
8152
8153    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8154        uint8_t histogramMode =
8155                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8156        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8157                histogramMode)) {
8158            rc = BAD_VALUE;
8159        }
8160    }
8161
8162    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8163        uint8_t sharpnessMapMode =
8164                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8165        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8166                sharpnessMapMode)) {
8167            rc = BAD_VALUE;
8168        }
8169    }
8170
8171    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8172        uint8_t tonemapMode =
8173                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8174        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8175            rc = BAD_VALUE;
8176        }
8177    }
8178    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8179    /*All tonemap channels will have the same number of points*/
8180    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8181        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8182        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8183        cam_rgb_tonemap_curves tonemapCurves;
8184        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8185        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8186            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8187                    __func__, tonemapCurves.tonemap_points_cnt,
8188                    CAM_MAX_TONEMAP_CURVE_SIZE);
8189            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8190        }
8191
8192        /* ch0 = G*/
8193        size_t point = 0;
8194        cam_tonemap_curve_t tonemapCurveGreen;
8195        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8196            for (size_t j = 0; j < 2; j++) {
8197               tonemapCurveGreen.tonemap_points[i][j] =
8198                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8199               point++;
8200            }
8201        }
8202        tonemapCurves.curves[0] = tonemapCurveGreen;
8203
8204        /* ch 1 = B */
8205        point = 0;
8206        cam_tonemap_curve_t tonemapCurveBlue;
8207        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8208            for (size_t j = 0; j < 2; j++) {
8209               tonemapCurveBlue.tonemap_points[i][j] =
8210                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8211               point++;
8212            }
8213        }
8214        tonemapCurves.curves[1] = tonemapCurveBlue;
8215
8216        /* ch 2 = R */
8217        point = 0;
8218        cam_tonemap_curve_t tonemapCurveRed;
8219        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8220            for (size_t j = 0; j < 2; j++) {
8221               tonemapCurveRed.tonemap_points[i][j] =
8222                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8223               point++;
8224            }
8225        }
8226        tonemapCurves.curves[2] = tonemapCurveRed;
8227
8228        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8229                tonemapCurves)) {
8230            rc = BAD_VALUE;
8231        }
8232    }
8233
8234    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8235        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8236        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8237                captureIntent)) {
8238            rc = BAD_VALUE;
8239        }
8240    }
8241
8242    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8243        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8244        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8245                blackLevelLock)) {
8246            rc = BAD_VALUE;
8247        }
8248    }
8249
8250    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8251        uint8_t lensShadingMapMode =
8252                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8253        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8254                lensShadingMapMode)) {
8255            rc = BAD_VALUE;
8256        }
8257    }
8258
8259    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8260        cam_area_t roi;
8261        bool reset = true;
8262        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8263
8264        // Map coordinate system from active array to sensor output.
8265        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8266                roi.rect.height);
8267
8268        if (scalerCropSet) {
8269            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8270        }
8271        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8272            rc = BAD_VALUE;
8273        }
8274    }
8275
8276    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8277        cam_area_t roi;
8278        bool reset = true;
8279        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8280
8281        // Map coordinate system from active array to sensor output.
8282        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8283                roi.rect.height);
8284
8285        if (scalerCropSet) {
8286            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8287        }
8288        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8289            rc = BAD_VALUE;
8290        }
8291    }
8292
8293    if (m_bIs4KVideo) {
8294        /* Override needed for Video template in case of 4K video */
8295        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8296                CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8297            rc = BAD_VALUE;
8298        }
8299    } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8300            frame_settings.exists(QCAMERA3_CDS_MODE)) {
8301        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8302        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8303            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8304        } else {
8305            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8306                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8307                rc = BAD_VALUE;
8308            }
8309        }
8310    }
8311
8312    // TNR
8313    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8314        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8315        uint8_t b_TnrRequested = 0;
8316        cam_denoise_param_t tnr;
8317        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8318        tnr.process_plates =
8319            (cam_denoise_process_type_t)frame_settings.find(
8320            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8321        b_TnrRequested = tnr.denoise_enable;
8322        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8323            rc = BAD_VALUE;
8324        }
8325    }
8326
8327    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8328        int32_t fwk_testPatternMode =
8329                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8330        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8331                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8332
8333        if (NAME_NOT_FOUND != testPatternMode) {
8334            cam_test_pattern_data_t testPatternData;
8335            memset(&testPatternData, 0, sizeof(testPatternData));
8336            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8337            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8338                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8339                int32_t *fwk_testPatternData =
8340                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8341                testPatternData.r = fwk_testPatternData[0];
8342                testPatternData.b = fwk_testPatternData[3];
8343                switch (gCamCapability[mCameraId]->color_arrangement) {
8344                    case CAM_FILTER_ARRANGEMENT_RGGB:
8345                    case CAM_FILTER_ARRANGEMENT_GRBG:
8346                        testPatternData.gr = fwk_testPatternData[1];
8347                        testPatternData.gb = fwk_testPatternData[2];
8348                        break;
8349                    case CAM_FILTER_ARRANGEMENT_GBRG:
8350                    case CAM_FILTER_ARRANGEMENT_BGGR:
8351                        testPatternData.gr = fwk_testPatternData[2];
8352                        testPatternData.gb = fwk_testPatternData[1];
8353                        break;
8354                    default:
8355                        ALOGE("%s: color arrangement %d is not supported", __func__,
8356                                gCamCapability[mCameraId]->color_arrangement);
8357                        break;
8358                }
8359            }
8360            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8361                    testPatternData)) {
8362                rc = BAD_VALUE;
8363            }
8364        } else {
8365            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8366                    fwk_testPatternMode);
8367        }
8368    }
8369
8370    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8371        size_t count = 0;
8372        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8373        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8374                gps_coords.data.d, gps_coords.count, count);
8375        if (gps_coords.count != count) {
8376            rc = BAD_VALUE;
8377        }
8378    }
8379
8380    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8381        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8382        size_t count = 0;
8383        const char *gps_methods_src = (const char *)
8384                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8385        memset(gps_methods, '\0', sizeof(gps_methods));
8386        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8387        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8388                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8389        if (GPS_PROCESSING_METHOD_SIZE != count) {
8390            rc = BAD_VALUE;
8391        }
8392    }
8393
8394    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8395        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8396        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8397                gps_timestamp)) {
8398            rc = BAD_VALUE;
8399        }
8400    }
8401
8402    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8403        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8404        cam_rotation_info_t rotation_info;
8405        if (orientation == 0) {
8406           rotation_info.rotation = ROTATE_0;
8407        } else if (orientation == 90) {
8408           rotation_info.rotation = ROTATE_90;
8409        } else if (orientation == 180) {
8410           rotation_info.rotation = ROTATE_180;
8411        } else if (orientation == 270) {
8412           rotation_info.rotation = ROTATE_270;
8413        }
8414        rotation_info.streamId = snapshotStreamId;
8415        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8416        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8417            rc = BAD_VALUE;
8418        }
8419    }
8420
8421    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8422        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8423        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8424            rc = BAD_VALUE;
8425        }
8426    }
8427
8428    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8429        uint32_t thumb_quality = (uint32_t)
8430                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8431        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8432                thumb_quality)) {
8433            rc = BAD_VALUE;
8434        }
8435    }
8436
8437    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8438        cam_dimension_t dim;
8439        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8440        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8441        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8442            rc = BAD_VALUE;
8443        }
8444    }
8445
8446    // Internal metadata
8447    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8448        size_t count = 0;
8449        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8450        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8451                privatedata.data.i32, privatedata.count, count);
8452        if (privatedata.count != count) {
8453            rc = BAD_VALUE;
8454        }
8455    }
8456
8457    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8458        uint8_t* use_av_timer =
8459                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8460        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8461            rc = BAD_VALUE;
8462        }
8463    }
8464
8465    // EV step
8466    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8467            gCamCapability[mCameraId]->exp_compensation_step)) {
8468        rc = BAD_VALUE;
8469    }
8470
8471    // CDS info
8472    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8473        cam_cds_data_t *cdsData = (cam_cds_data_t *)
8474                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8475
8476        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8477                CAM_INTF_META_CDS_DATA, *cdsData)) {
8478            rc = BAD_VALUE;
8479        }
8480    }
8481
8482    return rc;
8483}
8484
8485/*===========================================================================
8486 * FUNCTION   : captureResultCb
8487 *
8488 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8489 *
8490 * PARAMETERS :
8491 *   @frame  : frame information from mm-camera-interface
8492 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8493 *   @userdata: userdata
8494 *
8495 * RETURN     : NONE
8496 *==========================================================================*/
8497void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8498                camera3_stream_buffer_t *buffer,
8499                uint32_t frame_number, bool isInputBuffer, void *userdata)
8500{
8501    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8502    if (hw == NULL) {
8503        ALOGE("%s: Invalid hw %p", __func__, hw);
8504        return;
8505    }
8506
8507    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
8508    return;
8509}
8510
8511
8512/*===========================================================================
8513 * FUNCTION   : initialize
8514 *
8515 * DESCRIPTION: Pass framework callback pointers to HAL
8516 *
8517 * PARAMETERS :
8518 *
8519 *
8520 * RETURN     : Success : 0
8521 *              Failure: -ENODEV
8522 *==========================================================================*/
8523
8524int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8525                                  const camera3_callback_ops_t *callback_ops)
8526{
8527    CDBG("%s: E", __func__);
8528    QCamera3HardwareInterface *hw =
8529        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8530    if (!hw) {
8531        ALOGE("%s: NULL camera device", __func__);
8532        return -ENODEV;
8533    }
8534
8535    int rc = hw->initialize(callback_ops);
8536    CDBG("%s: X", __func__);
8537    return rc;
8538}
8539
8540/*===========================================================================
8541 * FUNCTION   : configure_streams
8542 *
8543 * DESCRIPTION:
8544 *
8545 * PARAMETERS :
8546 *
8547 *
8548 * RETURN     : Success: 0
8549 *              Failure: -EINVAL (if stream configuration is invalid)
8550 *                       -ENODEV (fatal error)
8551 *==========================================================================*/
8552
8553int QCamera3HardwareInterface::configure_streams(
8554        const struct camera3_device *device,
8555        camera3_stream_configuration_t *stream_list)
8556{
8557    CDBG("%s: E", __func__);
8558    QCamera3HardwareInterface *hw =
8559        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8560    if (!hw) {
8561        ALOGE("%s: NULL camera device", __func__);
8562        return -ENODEV;
8563    }
8564    int rc = hw->configureStreams(stream_list);
8565    CDBG("%s: X", __func__);
8566    return rc;
8567}
8568
8569/*===========================================================================
8570 * FUNCTION   : construct_default_request_settings
8571 *
8572 * DESCRIPTION: Configure a settings buffer to meet the required use case
8573 *
8574 * PARAMETERS :
8575 *
8576 *
8577 * RETURN     : Success: Return valid metadata
8578 *              Failure: Return NULL
8579 *==========================================================================*/
8580const camera_metadata_t* QCamera3HardwareInterface::
8581    construct_default_request_settings(const struct camera3_device *device,
8582                                        int type)
8583{
8584
8585    CDBG("%s: E", __func__);
8586    camera_metadata_t* fwk_metadata = NULL;
8587    QCamera3HardwareInterface *hw =
8588        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8589    if (!hw) {
8590        ALOGE("%s: NULL camera device", __func__);
8591        return NULL;
8592    }
8593
8594    fwk_metadata = hw->translateCapabilityToMetadata(type);
8595
8596    CDBG("%s: X", __func__);
8597    return fwk_metadata;
8598}
8599
8600/*===========================================================================
8601 * FUNCTION   : process_capture_request
8602 *
8603 * DESCRIPTION:
8604 *
8605 * PARAMETERS :
8606 *
8607 *
8608 * RETURN     :
8609 *==========================================================================*/
8610int QCamera3HardwareInterface::process_capture_request(
8611                    const struct camera3_device *device,
8612                    camera3_capture_request_t *request)
8613{
8614    CDBG("%s: E", __func__);
8615    QCamera3HardwareInterface *hw =
8616        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8617    if (!hw) {
8618        ALOGE("%s: NULL camera device", __func__);
8619        return -EINVAL;
8620    }
8621
8622    int rc = hw->processCaptureRequest(request);
8623    CDBG("%s: X", __func__);
8624    return rc;
8625}
8626
8627/*===========================================================================
8628 * FUNCTION   : dump
8629 *
8630 * DESCRIPTION:
8631 *
8632 * PARAMETERS :
8633 *
8634 *
8635 * RETURN     :
8636 *==========================================================================*/
8637
8638void QCamera3HardwareInterface::dump(
8639                const struct camera3_device *device, int fd)
8640{
8641    /* Log level property is read when "adb shell dumpsys media.camera" is
8642       called so that the log level can be controlled without restarting
8643       the media server */
8644    getLogLevel();
8645
8646    CDBG("%s: E", __func__);
8647    QCamera3HardwareInterface *hw =
8648        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8649    if (!hw) {
8650        ALOGE("%s: NULL camera device", __func__);
8651        return;
8652    }
8653
8654    hw->dump(fd);
8655    CDBG("%s: X", __func__);
8656    return;
8657}
8658
8659/*===========================================================================
8660 * FUNCTION   : flush
8661 *
8662 * DESCRIPTION:
8663 *
8664 * PARAMETERS :
8665 *
8666 *
8667 * RETURN     :
8668 *==========================================================================*/
8669
8670int QCamera3HardwareInterface::flush(
8671                const struct camera3_device *device)
8672{
8673    int rc;
8674    CDBG("%s: E", __func__);
8675    QCamera3HardwareInterface *hw =
8676        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8677    if (!hw) {
8678        ALOGE("%s: NULL camera device", __func__);
8679        return -EINVAL;
8680    }
8681
8682    rc = hw->flush();
8683    CDBG("%s: X", __func__);
8684    return rc;
8685}
8686
8687/*===========================================================================
8688 * FUNCTION   : close_camera_device
8689 *
8690 * DESCRIPTION:
8691 *
8692 * PARAMETERS :
8693 *
8694 *
8695 * RETURN     :
8696 *==========================================================================*/
8697int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
8698{
8699    CDBG("%s: E", __func__);
8700    int ret = NO_ERROR;
8701    QCamera3HardwareInterface *hw =
8702        reinterpret_cast<QCamera3HardwareInterface *>(
8703            reinterpret_cast<camera3_device_t *>(device)->priv);
8704    if (!hw) {
8705        ALOGE("NULL camera device");
8706        return BAD_VALUE;
8707    }
8708    delete hw;
8709
8710    CDBG("%s: X", __func__);
8711    return ret;
8712}
8713
8714/*===========================================================================
8715 * FUNCTION   : getWaveletDenoiseProcessPlate
8716 *
8717 * DESCRIPTION: query wavelet denoise process plate
8718 *
8719 * PARAMETERS : None
8720 *
8721 * RETURN     : WNR prcocess plate value
8722 *==========================================================================*/
8723cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
8724{
8725    char prop[PROPERTY_VALUE_MAX];
8726    memset(prop, 0, sizeof(prop));
8727    property_get("persist.denoise.process.plates", prop, "0");
8728    int processPlate = atoi(prop);
8729    switch(processPlate) {
8730    case 0:
8731        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8732    case 1:
8733        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8734    case 2:
8735        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8736    case 3:
8737        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8738    default:
8739        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8740    }
8741}
8742
8743
8744/*===========================================================================
8745 * FUNCTION   : getTemporalDenoiseProcessPlate
8746 *
8747 * DESCRIPTION: query temporal denoise process plate
8748 *
8749 * PARAMETERS : None
8750 *
8751 * RETURN     : TNR prcocess plate value
8752 *==========================================================================*/
8753cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
8754{
8755    char prop[PROPERTY_VALUE_MAX];
8756    memset(prop, 0, sizeof(prop));
8757    property_get("persist.tnr.process.plates", prop, "0");
8758    int processPlate = atoi(prop);
8759    switch(processPlate) {
8760    case 0:
8761        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
8762    case 1:
8763        return CAM_WAVELET_DENOISE_CBCR_ONLY;
8764    case 2:
8765        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8766    case 3:
8767        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
8768    default:
8769        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
8770    }
8771}
8772
8773
8774/*===========================================================================
8775 * FUNCTION   : extractSceneMode
8776 *
8777 * DESCRIPTION: Extract scene mode from frameworks set metadata
8778 *
8779 * PARAMETERS :
8780 *      @frame_settings: CameraMetadata reference
8781 *      @metaMode: ANDROID_CONTORL_MODE
8782 *      @hal_metadata: hal metadata structure
8783 *
8784 * RETURN     : None
8785 *==========================================================================*/
8786int32_t QCamera3HardwareInterface::extractSceneMode(
8787        const CameraMetadata &frame_settings, uint8_t metaMode,
8788        metadata_buffer_t *hal_metadata)
8789{
8790    int32_t rc = NO_ERROR;
8791
8792    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
8793        camera_metadata_ro_entry entry =
8794                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
8795        if (0 == entry.count)
8796            return rc;
8797
8798        uint8_t fwk_sceneMode = entry.data.u8[0];
8799
8800        int val = lookupHalName(SCENE_MODES_MAP,
8801                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
8802                fwk_sceneMode);
8803        if (NAME_NOT_FOUND != val) {
8804            uint8_t sceneMode = (uint8_t)val;
8805            CDBG("%s: sceneMode: %d", __func__, sceneMode);
8806            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8807                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8808                rc = BAD_VALUE;
8809            }
8810        }
8811    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
8812            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
8813        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
8814        CDBG("%s: sceneMode: %d", __func__, sceneMode);
8815        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8816                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
8817            rc = BAD_VALUE;
8818        }
8819    }
8820    return rc;
8821}
8822
8823/*===========================================================================
8824 * FUNCTION   : needRotationReprocess
8825 *
8826 * DESCRIPTION: if rotation needs to be done by reprocess in pp
8827 *
8828 * PARAMETERS : none
8829 *
8830 * RETURN     : true: needed
8831 *              false: no need
8832 *==========================================================================*/
8833bool QCamera3HardwareInterface::needRotationReprocess()
8834{
8835    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
8836        // current rotation is not zero, and pp has the capability to process rotation
8837        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
8838        return true;
8839    }
8840
8841    return false;
8842}
8843
8844/*===========================================================================
8845 * FUNCTION   : needReprocess
8846 *
8847 * DESCRIPTION: if reprocess in needed
8848 *
8849 * PARAMETERS : none
8850 *
8851 * RETURN     : true: needed
8852 *              false: no need
8853 *==========================================================================*/
8854bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
8855{
8856    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
8857        // TODO: add for ZSL HDR later
8858        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
8859        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
8860            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
8861            return true;
8862        } else {
8863            CDBG_HIGH("%s: already post processed frame", __func__);
8864            return false;
8865        }
8866    }
8867    return needRotationReprocess();
8868}
8869
8870/*===========================================================================
8871 * FUNCTION   : needJpegRotation
8872 *
8873 * DESCRIPTION: if rotation from jpeg is needed
8874 *
8875 * PARAMETERS : none
8876 *
8877 * RETURN     : true: needed
8878 *              false: no need
8879 *==========================================================================*/
8880bool QCamera3HardwareInterface::needJpegRotation()
8881{
8882   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
8883    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
8884       CDBG("%s: Need Jpeg to do the rotation", __func__);
8885       return true;
8886    }
8887    return false;
8888}
8889
8890/*===========================================================================
8891 * FUNCTION   : addOfflineReprocChannel
8892 *
8893 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
8894 *              coming from input channel
8895 *
8896 * PARAMETERS :
8897 *   @config  : reprocess configuration
8898 *   @inputChHandle : pointer to the input (source) channel
8899 *
8900 *
8901 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
8902 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the reprocess channel bound to the input (source) channel.
    // NOTE(review): the NULL check below only fires if operator new is
    // non-throwing in this build configuration — confirm against the
    // project's build flags.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    // Initialize before adding streams; on any failure the channel is
    // destroyed so the caller never sees a half-constructed channel.
    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Enable the HAL3 superset of postprocessing features for reprocess.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;

    // Mirror the source channel's streams onto the reprocess channel.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
8941
8942/*===========================================================================
8943 * FUNCTION   : getMobicatMask
8944 *
8945 * DESCRIPTION: returns mobicat mask
8946 *
8947 * PARAMETERS : none
8948 *
8949 * RETURN     : mobicat mask
8950 *
8951 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Simple accessor for the mobicat enable mask (set by setMobicat()).
    return m_MobicatMask;
}
8956
8957/*===========================================================================
8958 * FUNCTION   : setMobicat
8959 *
8960 * DESCRIPTION: set Mobicat on/off.
8961 *
8962 * PARAMETERS :
8963 *   @params  : none
8964 *
8965 * RETURN     : int32_t type of status
8966 *              NO_ERROR  -- success
8967 *              none-zero failure code
8968 *==========================================================================*/
8969int32_t QCamera3HardwareInterface::setMobicat()
8970{
8971    char value [PROPERTY_VALUE_MAX];
8972    property_get("persist.camera.mobicat", value, "0");
8973    int32_t ret = NO_ERROR;
8974    uint8_t enableMobi = (uint8_t)atoi(value);
8975
8976    if (enableMobi) {
8977        tune_cmd_t tune_cmd;
8978        tune_cmd.type = SET_RELOAD_CHROMATIX;
8979        tune_cmd.module = MODULE_ALL;
8980        tune_cmd.value = TRUE;
8981        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8982                CAM_INTF_PARM_SET_VFE_COMMAND,
8983                tune_cmd);
8984
8985        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
8986                CAM_INTF_PARM_SET_PP_COMMAND,
8987                tune_cmd);
8988    }
8989    m_MobicatMask = enableMobi;
8990
8991    return ret;
8992}
8993
8994/*===========================================================================
8995* FUNCTION   : getLogLevel
8996*
8997* DESCRIPTION: Reads the log level property into a variable
8998*
8999* PARAMETERS :
9000*   None
9001*
9002* RETURN     :
9003*   None
9004*==========================================================================*/
9005void QCamera3HardwareInterface::getLogLevel()
9006{
9007    char prop[PROPERTY_VALUE_MAX];
9008    uint32_t globalLogLevel = 0;
9009
9010    property_get("persist.camera.hal.debug", prop, "0");
9011    int val = atoi(prop);
9012    if (0 <= val) {
9013        gCamHal3LogLevel = (uint32_t)val;
9014    }
9015    property_get("persist.camera.global.debug", prop, "0");
9016    val = atoi(prop);
9017    if (0 <= val) {
9018        globalLogLevel = (uint32_t)val;
9019    }
9020
9021    /* Highest log level among hal.logs and global.logs is selected */
9022    if (gCamHal3LogLevel < globalLogLevel)
9023        gCamHal3LogLevel = globalLogLevel;
9024
9025    return;
9026}
9027
9028/*===========================================================================
9029 * FUNCTION   : validateStreamRotations
9030 *
9031 * DESCRIPTION: Check if the rotations requested are supported
9032 *
9033 * PARAMETERS :
9034 *   @stream_list : streams to be configured
9035 *
9036 * RETURN     : NO_ERROR on success
9037 *              -EINVAL on failure
9038 *
9039 *==========================================================================*/
9040int QCamera3HardwareInterface::validateStreamRotations(
9041        camera3_stream_configuration_t *streamList)
9042{
9043    int rc = NO_ERROR;
9044
9045    /*
9046    * Loop through all streams requested in configuration
9047    * Check if unsupported rotations have been requested on any of them
9048    */
9049    for (size_t j = 0; j < streamList->num_streams; j++){
9050        camera3_stream_t *newStream = streamList->streams[j];
9051
9052        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9053        bool isImplDef = (newStream->format ==
9054                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9055        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9056                isImplDef);
9057
9058        if (isRotated && (!isImplDef || isZsl)) {
9059            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
9060                    "type:%d and stream format:%d", __func__,
9061                    newStream->rotation, newStream->stream_type,
9062                    newStream->format);
9063            rc = -EINVAL;
9064            break;
9065        }
9066    }
9067    return rc;
9068}
9069
9070/*===========================================================================
9071* FUNCTION   : getFlashInfo
9072*
9073* DESCRIPTION: Retrieve information about whether the device has a flash.
9074*
9075* PARAMETERS :
9076*   @cameraId  : Camera id to query
9077*   @hasFlash  : Boolean indicating whether there is a flash device
9078*                associated with given camera
9079*   @flashNode : If a flash device exists, this will be its device node.
9080*
9081* RETURN     :
9082*   None
9083*==========================================================================*/
9084void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9085        bool& hasFlash,
9086        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9087{
9088    cam_capability_t* camCapability = gCamCapability[cameraId];
9089    if (NULL == camCapability) {
9090        hasFlash = false;
9091        flashNode[0] = '\0';
9092    } else {
9093        hasFlash = camCapability->flash_available;
9094        strlcpy(flashNode,
9095                (char*)camCapability->flash_dev_name,
9096                QCAMERA_MAX_FILEPATH_LENGTH);
9097    }
9098}
9099
9100/*===========================================================================
9101* FUNCTION   : getEepromVersionInfo
9102*
9103* DESCRIPTION: Retrieve version info of the sensor EEPROM data
9104*
9105* PARAMETERS : None
9106*
9107* RETURN     : string describing EEPROM version
9108*              "\0" if no such info available
9109*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into the static capability table; the string is
    // "\0" when no EEPROM version info is available for this sensor.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
9114
9115/*===========================================================================
9116* FUNCTION   : getLdafCalib
9117*
9118* DESCRIPTION: Retrieve Laser AF calibration data
9119*
9120* PARAMETERS : None
9121*
9122* RETURN     : Two uint32_t describing laser AF calibration data
9123*              NULL if none is available.
9124*==========================================================================*/
9125const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9126{
9127    if (mLdafCalibExist) {
9128        return &mLdafCalib[0];
9129    } else {
9130        return NULL;
9131    }
9132}
9133
9134/*===========================================================================
9135 * FUNCTION   : dynamicUpdateMetaStreamInfo
9136 *
9137 * DESCRIPTION: This function:
9138 *             (1) stops all the channels
9139 *             (2) returns error on pending requests and buffers
9140 *             (3) sends metastream_info in setparams
9141 *             (4) starts all channels
9142 *             This is useful when sensor has to be restarted to apply any
9143 *             settings such as frame rate from a different sensor mode
9144 *
9145 * PARAMETERS : None
9146 *
9147 * RETURN     : NO_ERROR on success
9148 *              Error codes on failure
9149 *
9150 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Step 1: stream-off every channel so the sensor can be reconfigured.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Step 2: fail any in-flight requests/buffers; they cannot complete
    // across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Deliberately non-fatal: channels are restarted below even if the
        // sensor mode could not be changed.
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // Step 4: stream-on everything again with the updated settings.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
9190
9191/*===========================================================================
9192 * FUNCTION   : stopAllChannels
9193 *
9194 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9195 *
9196 * PARAMETERS : None
9197 *
9198 * RETURN     : NO_ERROR on success
9199 *              Error codes on failure
9200 *
9201 *==========================================================================*/
9202int32_t QCamera3HardwareInterface::stopAllChannels()
9203{
9204    int32_t rc = NO_ERROR;
9205
9206    // Stop the Streams/Channels
9207    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9208        it != mStreamInfo.end(); it++) {
9209        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9210        channel->stop();
9211        (*it)->status = INVALID;
9212    }
9213
9214    if (mSupportChannel) {
9215        mSupportChannel->stop();
9216    }
9217    if (mAnalysisChannel) {
9218        mAnalysisChannel->stop();
9219    }
9220    if (mRawDumpChannel) {
9221        mRawDumpChannel->stop();
9222    }
9223    if (mMetadataChannel) {
9224        /* If content of mStreamInfo is not 0, there is metadata stream */
9225        mMetadataChannel->stop();
9226    }
9227
9228    CDBG("%s:%d All channels stopped", __func__, __LINE__);
9229    return rc;
9230}
9231
9232/*===========================================================================
9233 * FUNCTION   : startAllChannels
9234 *
9235 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9236 *
9237 * PARAMETERS : None
9238 *
9239 * RETURN     : NO_ERROR on success
9240 *              Error codes on failure
9241 *
9242 *==========================================================================*/
9243int32_t QCamera3HardwareInterface::startAllChannels()
9244{
9245    int32_t rc = NO_ERROR;
9246
9247    CDBG("%s: Start all channels ", __func__);
9248    // Start the Streams/Channels
9249    if (mMetadataChannel) {
9250        /* If content of mStreamInfo is not 0, there is metadata stream */
9251        rc = mMetadataChannel->start();
9252        if (rc < 0) {
9253            ALOGE("%s: META channel start failed", __func__);
9254            return rc;
9255        }
9256    }
9257    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9258        it != mStreamInfo.end(); it++) {
9259        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9260        rc = channel->start();
9261        if (rc < 0) {
9262            ALOGE("%s: channel start failed", __func__);
9263            return rc;
9264        }
9265    }
9266    if (mAnalysisChannel) {
9267        mAnalysisChannel->start();
9268    }
9269    if (mSupportChannel) {
9270        rc = mSupportChannel->start();
9271        if (rc < 0) {
9272            ALOGE("%s: Support channel start failed", __func__);
9273            return rc;
9274        }
9275    }
9276    if (mRawDumpChannel) {
9277        rc = mRawDumpChannel->start();
9278        if (rc < 0) {
9279            ALOGE("%s: RAW dump channel start failed", __func__);
9280            return rc;
9281        }
9282    }
9283
9284    CDBG("%s:%d All channels started", __func__, __LINE__);
9285    return rc;
9286}
9287
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers.
 *              Two phases, split on the oldest pending request's frame number:
 *              (1) buffers OLDER than that frame get ERROR_BUFFER notifies
 *              plus a buffers-only capture result (their metadata has already
 *              been delivered); (2) buffers at/after that frame get a single
 *              ERROR_REQUEST notify per frame plus a capture result, and the
 *              corresponding pending request entry is erased. All pending
 *              bookkeeping is cleared at the end.
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    // flushMap groups pending buffers by frame number so each frame is
    // reported to the framework with exactly one capture result.
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Anchor frame number for the phase split below.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
      __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number. Only buffers older than the oldest pending request
    // are taken here; they are removed from the pending-buffer bookkeeping
    // as they are collected.
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                // First buffer seen for this frame: start a new group.
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                // Frame already has a group: append to it.
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    // Phase 1: per-buffer ERROR_BUFFER notifies, then one buffers-only
    // capture result (result.result stays NULL) per frame.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
          __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        // NOTE(review): non-nothrow operator new never returns NULL when
        // exceptions are enabled; this guard only helps under
        // -fno-exceptions-style semantics — confirm toolchain behavior.
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            // Fences are consumed: -1 means no waiting required; the buffer
            // itself is flagged as an error.
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: regroup ALL remaining pending buffers (frame numbers at or
    // after frameNum) — these belong to requests still on
    // mPendingRequestsList. The bookkeeping is fully drained here.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework.
    // NOTE(review): i is advanced in lockstep with iFlush, which assumes one
    // pending request per flushMap entry in the same frame order; if a pending
    // request has no buffers (or vice versa) i could be dereferenced past
    // end() — confirm invariants maintained by the request bookkeeping.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
              __func__, frame_number);

        // Send shutter notify to frameworks
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        // One ERROR_REQUEST per frame (error_stream NULL): the whole request
        // failed, so no metadata will follow.
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        // NOTE(review): same non-nothrow new caveat as in phase 1 above.
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        // Return any input buffer of the matching pending request along with
        // the errored output buffers.
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
9466
9467bool QCamera3HardwareInterface::isOnEncoder(
9468        const cam_dimension_t max_viewfinder_size,
9469        uint32_t width, uint32_t height)
9470{
9471    return (width > (uint32_t)max_viewfinder_size.width ||
9472            height > (uint32_t)max_viewfinder_size.height);
9473}
9474
9475/*===========================================================================
9476 * FUNCTION   : setBundleInfo
9477 *
9478 * DESCRIPTION: Set bundle info for all streams that are bundle.
9479 *
9480 * PARAMETERS : None
9481 *
9482 * RETURN     : NO_ERROR on success
9483 *              Error codes on failure
9484 *==========================================================================*/
9485int32_t QCamera3HardwareInterface::setBundleInfo()
9486{
9487    int32_t rc = NO_ERROR;
9488
9489    if (mChannelHandle) {
9490        cam_bundle_config_t bundleInfo;
9491        memset(&bundleInfo, 0, sizeof(bundleInfo));
9492        rc = mCameraHandle->ops->get_bundle_info(
9493                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9494        if (rc != NO_ERROR) {
9495            ALOGE("%s: get_bundle_info failed", __func__);
9496            return rc;
9497        }
9498        if (mAnalysisChannel) {
9499            mAnalysisChannel->setBundleInfo(bundleInfo);
9500        }
9501        if (mSupportChannel) {
9502            mSupportChannel->setBundleInfo(bundleInfo);
9503        }
9504        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9505                it != mStreamInfo.end(); it++) {
9506            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9507            channel->setBundleInfo(bundleInfo);
9508        }
9509        if (mRawDumpChannel) {
9510            mRawDumpChannel->setBundleInfo(bundleInfo);
9511        }
9512    }
9513
9514    return rc;
9515}
9516
9517}; //end namespace qcamera
9518