1/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6*     * Redistributions of source code must retain the above copyright
7*       notice, this list of conditions and the following disclaimer.
8*     * Redistributions in binary form must reproduce the above
9*       copyright notice, this list of conditions and the following
10*       disclaimer in the documentation and/or other materials provided
11*       with the distribution.
12*     * Neither the name of The Linux Foundation nor the names of its
13*       contributors may be used to endorse or promote products derived
14*       from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define ATRACE_TAG ATRACE_TAG_CAMERA
31#define LOG_TAG "QCamera3HWI"
32//#define LOG_NDEBUG 0
33
34#define __STDC_LIMIT_MACROS
35#include <cutils/properties.h>
36#include <hardware/camera3.h>
37#include <camera/CameraMetadata.h>
38#include <stdio.h>
39#include <stdlib.h>
40#include <fcntl.h>
41#include <stdint.h>
42#include <utils/Log.h>
43#include <utils/Errors.h>
44#include <utils/Trace.h>
45#include <sync/sync.h>
46#include <gralloc_priv.h>
47#include "util/QCameraFlash.h"
48#include "QCamera3HWI.h"
49#include "QCamera3Mem.h"
50#include "QCamera3Channel.h"
51#include "QCamera3PostProc.h"
52#include "QCamera3VendorTags.h"
53#include "cam_cond.h"
54
55using namespace android;
56
57namespace qcamera {
58
// Convenience accessor: raw buffer pointer at INDEX inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline/result pacing constants reported to the framework.
// NOTE(review): exact semantics depend on callers outside this chunk.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum representable pixel values for the supported bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video frame dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-type stream count limits advertised to the framework.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 values per metering region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds

// Element count of a statically-sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Post-processing feature superset advertised for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )

// Sentinel for blocking waits with no timeout.
#define TIMEOUT_NEVER -1
98
// Per-sensor capability tables (populated elsewhere in this file/project).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached Android static metadata, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Guards process-global camera HAL state shared across instances.
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
// Runtime-tunable HAL log verbosity (read by the CDBG/CDBG_HIGH macros).
volatile uint32_t gCamHal3LogLevel = 1;
103
// Maps the CDS property strings ("On"/"Off"/"Auto") to HAL CDS modes.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Android control.effectMode <-> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android control.awbMode <-> HAL white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android control.sceneMode <-> HAL scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android control.afMode <-> HAL focus mode.
// NOTE: AF_MODE_OFF appears twice so that both CAM_FOCUS_MODE_OFF and
// CAM_FOCUS_MODE_FIXED reverse-map to it; order matters for reverse lookup.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android colorCorrection.aberrationMode <-> HAL CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android control.aeAntibandingMode <-> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android control.aeMode -> HAL flash mode implied by that AE mode
// (AE_MODE_ON and OFF both imply flash off).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android flash.mode <-> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android statistics.faceDetectMode <-> HAL face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android lens.info.focusDistanceCalibration <-> HAL focus calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens.state <-> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Supported JPEG thumbnail sizes, flattened (width, height) pairs.
// The leading 0,0 pair presumably means "no thumbnail" per the Android
// jpeg.availableThumbnailSizes convention -- confirm against usage.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Android sensor.testPatternMode <-> HAL test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
252
/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * code traverses from lower to higher index, so for HAL values that map to several
 * Android values the traversal logic selects the first one found.
 */
// Android sensor.referenceIlluminant1 <-> HAL AWB illuminant
// (several Android illuminants share one HAL value; order matters, see above).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested frame rate -> HAL high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
290
// camera3_device_ops vtable handed to the framework via mCameraDevice.ops.
// register_stream_buffers and get_metadata_vendor_tag_ops are NULL because
// they are deprecated for the device API version this HAL reports.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize =                         QCamera3HardwareInterface::initialize,
    .configure_streams =                  QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers =            NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request =            QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops =        NULL,
    .dump =                               QCamera3HardwareInterface::dump,
    .flush =                              QCamera3HardwareInterface::flush,
    .reserved =                           {0},
};
302
303/*===========================================================================
304 * FUNCTION   : QCamera3HardwareInterface
305 *
306 * DESCRIPTION: constructor of QCamera3HardwareInterface
307 *
308 * PARAMETERS :
309 *   @cameraId  : camera ID
310 *
311 * RETURN     : none
312 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mHybridAeEnable(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Populate the camera3_device_t the framework talks to; priv points
    // back at this instance so the static ops can recover it.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; they are created on demand.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles, default enabled ("1").
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.last_frame_number = -1;
}
399
400/*===========================================================================
401 * FUNCTION   : ~QCamera3HardwareInterface
402 *
403 * DESCRIPTION: destructor of QCamera3HardwareInterface
404 *
405 * PARAMETERS : none
406 *
407 * RETURN     : none
408 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    // Remember now whether buffers are still outstanding; checked again at
    // the very end to decide whether to abort the process.
    bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    }

    // Second pass: everything is stopped, now it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            clear_metadata_buffer(mParameters);

            // Check if there is still pending buffer not yet returned.
            if (hasPendingBuffers) {
                for (auto& pendingBuffer : mPendingBuffersMap.mPendingBufferList) {
                    ALOGE("%s: Buffer not yet returned for stream. Frame number %d, format 0x%x, width %d, height %d",
                        __func__, pendingBuffer.frame_number, pendingBuffer.stream->format, pendingBuffer.stream->width,
                        pendingBuffer.stream->height);
                }
                ALOGE("%s: Last requested frame number is %d", __func__, mPendingBuffersMap.last_frame_number);
                // Ask the daemon to restart so leaked buffers are reclaimed.
                uint8_t restart = TRUE;
                ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DAEMON_RESTART,
                        restart);
            }

            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);

            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
        mChannelHandle = 0;
    }

    if (mCameraOpened)
        closeCamera();

    // Drain all bookkeeping lists; erasePendingRequest also frees the
    // per-request heap allocations.
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);

    // Deliberate process exit: with buffers leaked to the framework the HAL
    // state is unrecoverable, so die and let the service restart us.
    if (hasPendingBuffers) {
        ALOGE("%s: Not all buffers were returned. Notified the camera daemon process to restart."
                " Exiting here...", __func__);
        exit(EXIT_FAILURE);
    }
    CDBG("%s: X", __func__);
}
549
550/*===========================================================================
551 * FUNCTION   : erasePendingRequest
552 *
553 * DESCRIPTION: function to erase a desired pending request after freeing any
554 *              allocated memory
555 *
556 * PARAMETERS :
557 *   @i       : iterator pointing to pending request to be erased
558 *
559 * RETURN     : iterator pointing to the next request
560 *==========================================================================*/
561QCamera3HardwareInterface::pendingRequestIterator
562        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
563{
564    if (i->input_buffer != NULL) {
565        free(i->input_buffer);
566        i->input_buffer = NULL;
567    }
568    if (i->settings != NULL)
569        free_camera_metadata((camera_metadata_t*)i->settings);
570    return mPendingRequestsList.erase(i);
571}
572
573/*===========================================================================
574 * FUNCTION   : camEvtHandle
575 *
576 * DESCRIPTION: Function registered to mm-camera-interface to handle events
577 *
578 * PARAMETERS :
579 *   @camera_handle : interface layer camera handle
580 *   @evt           : ptr to event
581 *   @user_data     : user data ptr
582 *
583 * RETURN     : none
584 *==========================================================================*/
585void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
586                                          mm_camera_event_t *evt,
587                                          void *user_data)
588{
589    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
590    if (obj && evt) {
591        switch(evt->server_event_type) {
592            case CAM_EVENT_TYPE_DAEMON_DIED:
593                ALOGE("%s: Fatal, camera daemon died", __func__);
594                //close the camera backend
595                if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
596                        && obj->mCameraHandle->ops) {
597                    obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
598                } else {
599                    ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
600                            __func__);
601                }
602                camera3_notify_msg_t notify_msg;
603                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
604                notify_msg.type = CAMERA3_MSG_ERROR;
605                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
606                notify_msg.message.error.error_stream = NULL;
607                notify_msg.message.error.frame_number = 0;
608                obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
609                break;
610
611            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
612                CDBG("%s: HAL got request pull from Daemon", __func__);
613                pthread_mutex_lock(&obj->mMutex);
614                obj->mWokenUpByDaemon = true;
615                obj->unblockRequestIfNecessary();
616                pthread_mutex_unlock(&obj->mMutex);
617                break;
618
619            default:
620                CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
621                        evt->server_event_type);
622                break;
623        }
624    } else {
625        ALOGE("%s: NULL user_data/evt", __func__);
626    }
627}
628
629/*===========================================================================
630 * FUNCTION   : openCamera
631 *
632 * DESCRIPTION: open camera
633 *
634 * PARAMETERS :
635 *   @hw_device  : double ptr for camera device struct
636 *
637 * RETURN     : int32_t type of status
638 *              NO_ERROR  -- success
639 *              none-zero failure code
640 *==========================================================================*/
641int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
642{
643    int rc = 0;
644    if (mCameraOpened) {
645        *hw_device = NULL;
646        return PERMISSION_DENIED;
647    }
648    m_perfLock.lock_acq();
649    rc = openCamera();
650    if (rc == 0) {
651        *hw_device = &mCameraDevice.common;
652    } else
653        *hw_device = NULL;
654
655    m_perfLock.lock_rel();
656    return rc;
657}
658
659/*===========================================================================
660 * FUNCTION   : openCamera
661 *
662 * DESCRIPTION: open camera
663 *
664 * PARAMETERS : none
665 *
666 * RETURN     : int32_t type of status
667 *              NO_ERROR  -- success
668 *              none-zero failure code
669 *==========================================================================*/
670int QCamera3HardwareInterface::openCamera()
671{
672    int rc = 0;
673
674    ATRACE_CALL();
675    if (mCameraHandle) {
676        ALOGE("Failure: Camera already opened");
677        return ALREADY_EXISTS;
678    }
679
680    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
681    if (rc < 0) {
682        ALOGE("%s: Failed to reserve flash for camera id: %d",
683                __func__,
684                mCameraId);
685        return UNKNOWN_ERROR;
686    }
687
688    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
689    if (rc) {
690        ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
691        return rc;
692    }
693
694    mCameraOpened = true;
695
696    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
697            camEvtHandle, (void *)this);
698
699    if (rc < 0) {
700        ALOGE("%s: Error, failed to register event callback", __func__);
701        /* Not closing camera here since it is already handled in destructor */
702        return FAILED_TRANSACTION;
703    }
704    mFirstConfiguration = true;
705    return NO_ERROR;
706}
707
708/*===========================================================================
709 * FUNCTION   : closeCamera
710 *
711 * DESCRIPTION: close camera
712 *
713 * PARAMETERS : none
714 *
715 * RETURN     : int32_t type of status
716 *              NO_ERROR  -- success
717 *              none-zero failure code
718 *==========================================================================*/
719int QCamera3HardwareInterface::closeCamera()
720{
721    ATRACE_CALL();
722    int rc = NO_ERROR;
723
724    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
725    mCameraHandle = NULL;
726    mCameraOpened = false;
727
728    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
729        CDBG("%s: Failed to release flash for camera id: %d",
730                __func__,
731                mCameraId);
732    }
733
734    return rc;
735}
736
737/*===========================================================================
738 * FUNCTION   : initialize
739 *
740 * DESCRIPTION: Initialize frameworks callback functions
741 *
742 * PARAMETERS :
743 *   @callback_ops : callback function to frameworks
744 *
745 * RETURN     :
746 *
747 *==========================================================================*/
748int QCamera3HardwareInterface::initialize(
749        const struct camera3_callback_ops *callback_ops)
750{
751    ATRACE_CALL();
752    int rc;
753
754    pthread_mutex_lock(&mMutex);
755
756    rc = initParameters();
757    if (rc < 0) {
758        ALOGE("%s: initParamters failed %d", __func__, rc);
759       goto err1;
760    }
761    mCallbackOps = callback_ops;
762
763    mChannelHandle = mCameraHandle->ops->add_channel(
764            mCameraHandle->camera_handle, NULL, NULL, this);
765    if (mChannelHandle == 0) {
766        ALOGE("%s: add_channel failed", __func__);
767        rc = -ENOMEM;
768        pthread_mutex_unlock(&mMutex);
769        return rc;
770    }
771
772    pthread_mutex_unlock(&mMutex);
773    mCameraInitialized = true;
774    return 0;
775
776err1:
777    pthread_mutex_unlock(&mMutex);
778    return rc;
779}
780
781/*===========================================================================
782 * FUNCTION   : validateStreamDimensions
783 *
784 * DESCRIPTION: Check if the configuration requested are those advertised
785 *
786 * PARAMETERS :
787 *   @stream_list : streams to be configured
788 *
789 * RETURN     :
790 *
791 *==========================================================================*/
792int QCamera3HardwareInterface::validateStreamDimensions(
793        camera3_stream_configuration_t *streamList)
794{
795    int rc = NO_ERROR;
796    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
797    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
798    size_t count = 0;
799
800    camera3_stream_t *inputStream = NULL;
801    /*
802    * Loop through all streams to find input stream if it exists*
803    */
804    for (size_t i = 0; i< streamList->num_streams; i++) {
805        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
806            if (inputStream != NULL) {
807                ALOGE("%s: Error, Multiple input streams requested");
808                return -EINVAL;
809            }
810            inputStream = streamList->streams[i];
811        }
812    }
813    /*
814    * Loop through all streams requested in configuration
815    * Check if unsupported sizes have been requested on any of them
816    */
817    for (size_t j = 0; j < streamList->num_streams; j++) {
818        bool sizeFound = false;
819        size_t jpeg_sizes_cnt = 0;
820        camera3_stream_t *newStream = streamList->streams[j];
821
822        uint32_t rotatedHeight = newStream->height;
823        uint32_t rotatedWidth = newStream->width;
824        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
825                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
826            rotatedHeight = newStream->width;
827            rotatedWidth = newStream->height;
828        }
829
830        /*
831        * Sizes are different for each type of stream format check against
832        * appropriate table.
833        */
834        switch (newStream->format) {
835        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
836        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
837        case HAL_PIXEL_FORMAT_RAW10:
838            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
839            for (size_t i = 0; i < count; i++) {
840                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
841                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
842                    sizeFound = true;
843                    break;
844                }
845            }
846            break;
847        case HAL_PIXEL_FORMAT_BLOB:
848            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
849            /* Generate JPEG sizes table */
850            makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
851                    count,
852                    MAX_SIZES_CNT,
853                    available_processed_sizes);
854            jpeg_sizes_cnt = filterJpegSizes(
855                    available_jpeg_sizes,
856                    available_processed_sizes,
857                    count * 2,
858                    MAX_SIZES_CNT * 2,
859                    gCamCapability[mCameraId]->active_array_size,
860                    gCamCapability[mCameraId]->max_downscale_factor);
861
862            /* Verify set size against generated sizes table */
863            for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
864                if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
865                        ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
866                    sizeFound = true;
867                    break;
868                }
869            }
870            break;
871        case HAL_PIXEL_FORMAT_YCbCr_420_888:
872        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
873        default:
874            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
875                    || newStream->stream_type == CAMERA3_STREAM_INPUT
876                    || IS_USAGE_ZSL(newStream->usage)) {
877                if (((int32_t)rotatedWidth ==
878                                gCamCapability[mCameraId]->active_array_size.width) &&
879                                ((int32_t)rotatedHeight ==
880                                gCamCapability[mCameraId]->active_array_size.height)) {
881                    sizeFound = true;
882                    break;
883                }
884                /* We could potentially break here to enforce ZSL stream
885                 * set from frameworks always is full active array size
886                 * but it is not clear from the spc if framework will always
887                 * follow that, also we have logic to override to full array
888                 * size, so keeping the logic lenient at the moment
889                 */
890            }
891            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
892                    MAX_SIZES_CNT);
893            for (size_t i = 0; i < count; i++) {
894                if (((int32_t)rotatedWidth ==
895                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
896                            ((int32_t)rotatedHeight ==
897                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
898                    sizeFound = true;
899                    break;
900                }
901            }
902            break;
903        } /* End of switch(newStream->format) */
904
905        /* We error out even if a single stream has unsupported size set */
906        if (!sizeFound) {
907            ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
908                  "type:%d", __func__, rotatedWidth, rotatedHeight,
909                  newStream->format);
910            ALOGE("%s: Active array size is  %d x %d", __func__,
911                    gCamCapability[mCameraId]->active_array_size.width,
912                    gCamCapability[mCameraId]->active_array_size.height);
913            rc = -EINVAL;
914            break;
915        }
916    } /* End of for each stream */
917    return rc;
918}
919
920/*===========================================================================
921 * FUNCTION   : validateUsageFlags
922 *
923 * DESCRIPTION: Check if the configuration usage flags are supported
924 *
925 * PARAMETERS :
926 *   @stream_list : streams to be configured
927 *
928 * RETURN     :
929 *   NO_ERROR if the usage flags are supported
930 *   error code if usage flags are not supported
931 *
932 *==========================================================================*/
933int QCamera3HardwareInterface::validateUsageFlags(
934        const camera3_stream_configuration_t* streamList)
935{
936    for (size_t j = 0; j < streamList->num_streams; j++) {
937        const camera3_stream_t *newStream = streamList->streams[j];
938
939        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
940            (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
941             newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
942            continue;
943        }
944
945        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
946        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
947        bool isZSL = IS_USAGE_ZSL(newStream->usage);
948
949        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
950        // So color spaces will always match.
951
952        // Check whether underlying formats of shared streams match.
953        if (isVideo && isPreview) {
954            ALOGE("Combined video and preview usage flag is not supported");
955            return -EINVAL;
956        }
957        if (isPreview && isZSL) {
958            ALOGE("Combined preview and zsl usage flag is not supported");
959            return -EINVAL;
960        }
961        if (isVideo && isZSL) {
962            ALOGE("Combined video and zsl usage flag is not supported");
963            return -EINVAL;
964        }
965    }
966    return NO_ERROR;
967}
968
969/*==============================================================================
970 * FUNCTION   : isSupportChannelNeeded
971 *
972 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
973 *
974 * PARAMETERS :
975 *   @stream_list : streams to be configured
976 *   @stream_config_info : the config info for streams to be configured
977 *
978 * RETURN     : Boolen true/false decision
979 *
980 *==========================================================================*/
981bool QCamera3HardwareInterface::isSupportChannelNeeded(
982        camera3_stream_configuration_t *streamList,
983        cam_stream_size_info_t stream_config_info)
984{
985    uint32_t i;
986    bool pprocRequested = false;
987    /* Check for conditions where PProc pipeline does not have any streams*/
988    for (i = 0; i < stream_config_info.num_streams; i++) {
989        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
990                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
991            pprocRequested = true;
992            break;
993        }
994    }
995
996    if (pprocRequested == false )
997        return true;
998
999    /* Dummy stream needed if only raw or jpeg streams present */
1000    for (i = 0; i < streamList->num_streams; i++) {
1001        switch(streamList->streams[i]->format) {
1002            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1003            case HAL_PIXEL_FORMAT_RAW10:
1004            case HAL_PIXEL_FORMAT_RAW16:
1005            case HAL_PIXEL_FORMAT_BLOB:
1006                break;
1007            default:
1008                return false;
1009        }
1010    }
1011    return true;
1012}
1013
1014/*==============================================================================
1015 * FUNCTION   : getSensorOutputSize
1016 *
1017 * DESCRIPTION: Get sensor output size based on current stream configuratoin
1018 *
1019 * PARAMETERS :
1020 *   @sensor_dim : sensor output dimension (output)
1021 *
1022 * RETURN     : int32_t type of status
1023 *              NO_ERROR  -- success
1024 *              none-zero failure code
1025 *
1026 *==========================================================================*/
1027int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1028{
1029    int32_t rc = NO_ERROR;
1030
1031    cam_dimension_t max_dim = {0, 0};
1032    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1033        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1034            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1035        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1036            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1037    }
1038
1039    clear_metadata_buffer(mParameters);
1040
1041    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1042            max_dim);
1043    if (rc != NO_ERROR) {
1044        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
1045        return rc;
1046    }
1047
1048    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1049    if (rc != NO_ERROR) {
1050        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
1051        return rc;
1052    }
1053
1054    clear_metadata_buffer(mParameters);
1055    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1056
1057    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1058            mParameters);
1059    if (rc != NO_ERROR) {
1060        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
1061        return rc;
1062    }
1063
1064    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1065    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
1066
1067    return rc;
1068}
1069
1070/*==============================================================================
1071 * FUNCTION   : enablePowerHint
1072 *
1073 * DESCRIPTION: enable single powerhint for preview and different video modes.
1074 *
1075 * PARAMETERS :
1076 *
1077 * RETURN     : NULL
1078 *
1079 *==========================================================================*/
1080void QCamera3HardwareInterface::enablePowerHint()
1081{
1082    if (!mPowerHintEnabled) {
1083        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
1084        mPowerHintEnabled = true;
1085    }
1086}
1087
1088/*==============================================================================
1089 * FUNCTION   : disablePowerHint
1090 *
1091 * DESCRIPTION: disable current powerhint.
1092 *
1093 * PARAMETERS :
1094 *
1095 * RETURN     : NULL
1096 *
1097 *==========================================================================*/
1098void QCamera3HardwareInterface::disablePowerHint()
1099{
1100    if (mPowerHintEnabled) {
1101        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
1102        mPowerHintEnabled = false;
1103    }
1104}
1105
1106/*===========================================================================
1107 * FUNCTION   : configureStreams
1108 *
1109 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1110 *              and output streams.
1111 *
1112 * PARAMETERS :
1113 *   @stream_list : streams to be configured
1114 *
1115 * RETURN     :
1116 *
1117 *==========================================================================*/
1118int QCamera3HardwareInterface::configureStreams(
1119        camera3_stream_configuration_t *streamList)
1120{
1121    ATRACE_CALL();
1122    int rc = 0;
1123
1124    // Acquire perfLock before configure streams
1125    m_perfLock.lock_acq();
1126    rc = configureStreamsPerfLocked(streamList);
1127    m_perfLock.lock_rel();
1128
1129    return rc;
1130}
1131
1132/*===========================================================================
1133 * FUNCTION   : configureStreamsPerfLocked
1134 *
1135 * DESCRIPTION: configureStreams while perfLock is held.
1136 *
1137 * PARAMETERS :
1138 *   @stream_list : streams to be configured
1139 *
1140 * RETURN     : int32_t type of status
1141 *              NO_ERROR  -- success
1142 *              none-zero failure code
1143 *==========================================================================*/
1144int QCamera3HardwareInterface::configureStreamsPerfLocked(
1145        camera3_stream_configuration_t *streamList)
1146{
1147    ATRACE_CALL();
1148    int rc = 0;
1149
1150    // Sanity check stream_list
1151    if (streamList == NULL) {
1152        ALOGE("%s: NULL stream configuration", __func__);
1153        return BAD_VALUE;
1154    }
1155    if (streamList->streams == NULL) {
1156        ALOGE("%s: NULL stream list", __func__);
1157        return BAD_VALUE;
1158    }
1159
1160    if (streamList->num_streams < 1) {
1161        ALOGE("%s: Bad number of streams requested: %d", __func__,
1162                streamList->num_streams);
1163        return BAD_VALUE;
1164    }
1165
1166    if (streamList->num_streams >= MAX_NUM_STREAMS) {
1167        ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
1168                MAX_NUM_STREAMS, streamList->num_streams);
1169        return BAD_VALUE;
1170    }
1171
1172    rc = validateUsageFlags(streamList);
1173    if (rc != NO_ERROR) {
1174        return rc;
1175    }
1176
1177    mOpMode = streamList->operation_mode;
1178    CDBG("%s: mOpMode: %d", __func__, mOpMode);
1179
1180    /* first invalidate all the steams in the mStreamList
1181     * if they appear again, they will be validated */
1182    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1183            it != mStreamInfo.end(); it++) {
1184        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1185        if (channel) {
1186          channel->stop();
1187        }
1188        (*it)->status = INVALID;
1189    }
1190
1191    if (mRawDumpChannel) {
1192        mRawDumpChannel->stop();
1193        delete mRawDumpChannel;
1194        mRawDumpChannel = NULL;
1195    }
1196
1197    if (mSupportChannel)
1198        mSupportChannel->stop();
1199
1200    if (mAnalysisChannel) {
1201        mAnalysisChannel->stop();
1202    }
1203    if (mMetadataChannel) {
1204        /* If content of mStreamInfo is not 0, there is metadata stream */
1205        mMetadataChannel->stop();
1206    }
1207    if (mChannelHandle) {
1208        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1209                mChannelHandle);
1210        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
1211    }
1212
1213    pthread_mutex_lock(&mMutex);
1214
1215    /* Check whether we have video stream */
1216    m_bIs4KVideo = false;
1217    m_bIsVideo = false;
1218    m_bEisSupportedSize = false;
1219    m_bTnrEnabled = false;
1220    bool isZsl = false;
1221    uint32_t videoWidth = 0U;
1222    uint32_t videoHeight = 0U;
1223    size_t rawStreamCnt = 0;
1224    size_t stallStreamCnt = 0;
1225    size_t processedStreamCnt = 0;
1226    // Number of streams on ISP encoder path
1227    size_t numStreamsOnEncoder = 0;
1228    size_t numYuv888OnEncoder = 0;
1229    bool bYuv888OverrideJpeg = false;
1230    cam_dimension_t largeYuv888Size = {0, 0};
1231    cam_dimension_t maxViewfinderSize = {0, 0};
1232    bool bJpegExceeds4K = false;
1233    bool bJpegOnEncoder = false;
1234    bool bUseCommonFeatureMask = false;
1235    uint32_t commonFeatureMask = 0;
1236    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1237    camera3_stream_t *inputStream = NULL;
1238    bool isJpeg = false;
1239    cam_dimension_t jpegSize = {0, 0};
1240
1241    /*EIS configuration*/
1242    bool eisSupported = false;
1243    bool oisSupported = false;
1244    int32_t margin_index = -1;
1245    uint8_t eis_prop_set;
1246    uint32_t maxEisWidth = 0;
1247    uint32_t maxEisHeight = 0;
1248    int32_t hal_version = CAM_HAL_V3;
1249
1250    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1251
1252    size_t count = IS_TYPE_MAX;
1253    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1254    for (size_t i = 0; i < count; i++) {
1255        if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
1256            eisSupported = true;
1257            margin_index = (int32_t)i;
1258            break;
1259        }
1260    }
1261
1262    count = CAM_OPT_STAB_MAX;
1263    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1264    for (size_t i = 0; i < count; i++) {
1265        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
1266            oisSupported = true;
1267            break;
1268        }
1269    }
1270
1271    if (eisSupported) {
1272        maxEisWidth = MAX_EIS_WIDTH;
1273        maxEisHeight = MAX_EIS_HEIGHT;
1274    }
1275
1276    /* EIS setprop control */
1277    char eis_prop[PROPERTY_VALUE_MAX];
1278    memset(eis_prop, 0, sizeof(eis_prop));
1279    property_get("persist.camera.eis.enable", eis_prop, "0");
1280    eis_prop_set = (uint8_t)atoi(eis_prop);
1281
1282    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
1283            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1284
1285    /* stream configurations */
1286    for (size_t i = 0; i < streamList->num_streams; i++) {
1287        camera3_stream_t *newStream = streamList->streams[i];
1288        ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
1289                "height = %d, rotation = %d, usage = 0x%x",
1290                __func__, i, newStream->stream_type, newStream->format,
1291                newStream->width, newStream->height, newStream->rotation,
1292                newStream->usage);
1293        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1294                newStream->stream_type == CAMERA3_STREAM_INPUT){
1295            isZsl = true;
1296        }
1297        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1298            inputStream = newStream;
1299        }
1300
1301        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1302            isJpeg = true;
1303            jpegSize.width = newStream->width;
1304            jpegSize.height = newStream->height;
1305            if (newStream->width > VIDEO_4K_WIDTH ||
1306                    newStream->height > VIDEO_4K_HEIGHT)
1307                bJpegExceeds4K = true;
1308        }
1309
1310        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1311                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1312            m_bIsVideo = true;
1313            videoWidth = newStream->width;
1314            videoHeight = newStream->height;
1315            if ((VIDEO_4K_WIDTH <= newStream->width) &&
1316                    (VIDEO_4K_HEIGHT <= newStream->height)) {
1317                m_bIs4KVideo = true;
1318            }
1319            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1320                                  (newStream->height <= maxEisHeight);
1321        }
1322        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1323                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1324            switch (newStream->format) {
1325            case HAL_PIXEL_FORMAT_BLOB:
1326                stallStreamCnt++;
1327                if (isOnEncoder(maxViewfinderSize, newStream->width,
1328                        newStream->height)) {
1329                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1330                    numStreamsOnEncoder++;
1331                    bJpegOnEncoder = true;
1332                }
1333                break;
1334            case HAL_PIXEL_FORMAT_RAW10:
1335            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1336            case HAL_PIXEL_FORMAT_RAW16:
1337                rawStreamCnt++;
1338                break;
1339            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1340                processedStreamCnt++;
1341                if (isOnEncoder(maxViewfinderSize, newStream->width,
1342                        newStream->height)) {
1343                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1344                            IS_USAGE_ZSL(newStream->usage)) {
1345                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1346                    } else {
1347                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1348                    }
1349                    numStreamsOnEncoder++;
1350                }
1351                break;
1352            case HAL_PIXEL_FORMAT_YCbCr_420_888:
1353                processedStreamCnt++;
1354                if (isOnEncoder(maxViewfinderSize, newStream->width,
1355                        newStream->height)) {
1356                    // If Yuv888 size is not greater than 4K, set feature mask
1357                    // to SUPERSET so that it support concurrent request on
1358                    // YUV and JPEG.
1359                    if (newStream->width <= VIDEO_4K_WIDTH &&
1360                            newStream->height <= VIDEO_4K_HEIGHT) {
1361                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1362                    } else {
1363                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
1364                    }
1365                    numStreamsOnEncoder++;
1366                    numYuv888OnEncoder++;
1367                    largeYuv888Size.width = newStream->width;
1368                    largeYuv888Size.height = newStream->height;
1369                }
1370                break;
1371            default:
1372                processedStreamCnt++;
1373                if (isOnEncoder(maxViewfinderSize, newStream->width,
1374                        newStream->height)) {
1375                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1376                    numStreamsOnEncoder++;
1377                }
1378                break;
1379            }
1380
1381        }
1382    }
1383
1384    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1385        !m_bIsVideo) {
1386        m_bEisEnable = false;
1387    }
1388
1389    /* Logic to enable/disable TNR based on specific config size/etc.*/
1390    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1391            ((videoWidth == 1920 && videoHeight == 1080) ||
1392            (videoWidth == 1280 && videoHeight == 720)) &&
1393            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1394        m_bTnrEnabled = true;
1395
1396    /* Check if num_streams is sane */
1397    if (stallStreamCnt > MAX_STALLING_STREAMS ||
1398            rawStreamCnt > MAX_RAW_STREAMS ||
1399            processedStreamCnt > MAX_PROCESSED_STREAMS) {
1400        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
1401                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
1402        pthread_mutex_unlock(&mMutex);
1403        return -EINVAL;
1404    }
1405    /* Check whether we have zsl stream or 4k video case */
1406    if (isZsl && m_bIsVideo) {
1407        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
1408        pthread_mutex_unlock(&mMutex);
1409        return -EINVAL;
1410    }
1411    /* Check if stream sizes are sane */
1412    if (numStreamsOnEncoder > 2) {
1413        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
1414                __func__);
1415        pthread_mutex_unlock(&mMutex);
1416        return -EINVAL;
1417    } else if (1 < numStreamsOnEncoder){
1418        bUseCommonFeatureMask = true;
1419        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
1420                __func__);
1421    }
1422
1423    /* Check if BLOB size is greater than 4k in 4k recording case */
1424    if (m_bIs4KVideo && bJpegExceeds4K) {
1425        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
1426                __func__);
1427        pthread_mutex_unlock(&mMutex);
1428        return -EINVAL;
1429    }
1430
1431    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1432    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1433    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1434    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1435    // configurations:
1436    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1437    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1438    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
1439    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1440        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1441                __func__);
1442        pthread_mutex_unlock(&mMutex);
1443        return -EINVAL;
1444    }
1445
1446    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1447    // the YUV stream's size is greater or equal to the JPEG size, set common
1448    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1449    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1450            jpegSize.width, jpegSize.height) &&
1451            largeYuv888Size.width > jpegSize.width &&
1452            largeYuv888Size.height > jpegSize.height) {
1453        bYuv888OverrideJpeg = true;
1454    } else if (!isJpeg && numStreamsOnEncoder > 1) {
1455        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1456    }
1457
1458    rc = validateStreamDimensions(streamList);
1459    if (rc == NO_ERROR) {
1460        rc = validateStreamRotations(streamList);
1461    }
1462    if (rc != NO_ERROR) {
1463        ALOGE("%s: Invalid stream configuration requested!", __func__);
1464        pthread_mutex_unlock(&mMutex);
1465        return rc;
1466    }
1467
1468    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1469    camera3_stream_t *jpegStream = NULL;
1470    for (size_t i = 0; i < streamList->num_streams; i++) {
1471        camera3_stream_t *newStream = streamList->streams[i];
1472        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
1473                "stream size : %d x %d, stream rotation = %d",
1474                __func__, newStream->stream_type, newStream->format,
1475                newStream->width, newStream->height, newStream->rotation);
1476        //if the stream is in the mStreamList validate it
1477        bool stream_exists = false;
1478        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1479                it != mStreamInfo.end(); it++) {
1480            if ((*it)->stream == newStream) {
1481                QCamera3ProcessingChannel *channel =
1482                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
1483                stream_exists = true;
1484                if (channel)
1485                    delete channel;
1486                (*it)->status = VALID;
1487                (*it)->stream->priv = NULL;
1488                (*it)->channel = NULL;
1489            }
1490        }
1491        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1492            //new stream
1493            stream_info_t* stream_info;
1494            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1495            if (!stream_info) {
1496               ALOGE("%s: Could not allocate stream info", __func__);
1497               rc = -ENOMEM;
1498               pthread_mutex_unlock(&mMutex);
1499               return rc;
1500            }
1501            stream_info->stream = newStream;
1502            stream_info->status = VALID;
1503            stream_info->channel = NULL;
1504            mStreamInfo.push_back(stream_info);
1505        }
1506        /* Covers Opaque ZSL and API1 F/W ZSL */
1507        if (IS_USAGE_ZSL(newStream->usage)
1508                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1509            if (zslStream != NULL) {
1510                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
1511                pthread_mutex_unlock(&mMutex);
1512                return BAD_VALUE;
1513            }
1514            zslStream = newStream;
1515        }
1516        /* Covers YUV reprocess */
1517        if (inputStream != NULL) {
1518            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1519                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1520                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1521                    && inputStream->width == newStream->width
1522                    && inputStream->height == newStream->height) {
1523                if (zslStream != NULL) {
1524                    /* This scenario indicates multiple YUV streams with same size
1525                     * as input stream have been requested, since zsl stream handle
1526                     * is solely use for the purpose of overriding the size of streams
1527                     * which share h/w streams we will just make a guess here as to
1528                     * which of the stream is a ZSL stream, this will be refactored
1529                     * once we make generic logic for streams sharing encoder output
1530                     */
1531                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
1532                }
1533                zslStream = newStream;
1534            }
1535        }
1536        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1537            jpegStream = newStream;
1538        }
1539    }
1540
1541    /* If a zsl stream is set, we know that we have configured at least one input or
1542       bidirectional stream */
1543    if (NULL != zslStream) {
1544        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1545        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1546        mInputStreamInfo.format = zslStream->format;
1547        mInputStreamInfo.usage = zslStream->usage;
1548        CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
1549                __func__, mInputStreamInfo.dim.width,
1550                mInputStreamInfo.dim.height,
1551                mInputStreamInfo.format, mInputStreamInfo.usage);
1552    }
1553
1554    cleanAndSortStreamInfo();
1555    if (mMetadataChannel) {
1556        delete mMetadataChannel;
1557        mMetadataChannel = NULL;
1558    }
1559    if (mSupportChannel) {
1560        delete mSupportChannel;
1561        mSupportChannel = NULL;
1562    }
1563
1564    if (mAnalysisChannel) {
1565        delete mAnalysisChannel;
1566        mAnalysisChannel = NULL;
1567    }
1568
1569    if (mDummyBatchChannel) {
1570        delete mDummyBatchChannel;
1571        mDummyBatchChannel = NULL;
1572    }
1573
1574    //Create metadata channel and initialize it
1575    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1576                    mChannelHandle, mCameraHandle->ops, captureResultCb,
1577                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
1578    if (mMetadataChannel == NULL) {
1579        ALOGE("%s: failed to allocate metadata channel", __func__);
1580        rc = -ENOMEM;
1581        pthread_mutex_unlock(&mMutex);
1582        return rc;
1583    }
1584    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1585    if (rc < 0) {
1586        ALOGE("%s: metadata channel initialization failed", __func__);
1587        delete mMetadataChannel;
1588        mMetadataChannel = NULL;
1589        pthread_mutex_unlock(&mMutex);
1590        return rc;
1591    }
1592
1593    // Create analysis stream all the time, even when h/w support is not available
1594    {
1595        mAnalysisChannel = new QCamera3SupportChannel(
1596                mCameraHandle->camera_handle,
1597                mChannelHandle,
1598                mCameraHandle->ops,
1599                &gCamCapability[mCameraId]->padding_info,
1600                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1601                CAM_STREAM_TYPE_ANALYSIS,
1602                &gCamCapability[mCameraId]->analysis_recommended_res,
1603                gCamCapability[mCameraId]->analysis_recommended_format,
1604                this,
1605                0); // force buffer count to 0
1606        if (!mAnalysisChannel) {
1607            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
1608            pthread_mutex_unlock(&mMutex);
1609            return -ENOMEM;
1610        }
1611    }
1612
1613    bool isRawStreamRequested = false;
1614    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1615    /* Allocate channel objects for the requested streams */
1616    for (size_t i = 0; i < streamList->num_streams; i++) {
1617        camera3_stream_t *newStream = streamList->streams[i];
1618        uint32_t stream_usage = newStream->usage;
1619        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1620        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1621        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1622                || IS_USAGE_ZSL(newStream->usage)) &&
1623            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1624            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1625            if (bUseCommonFeatureMask) {
1626                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1627                        commonFeatureMask;
1628            } else {
1629                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1630                        CAM_QCOM_FEATURE_NONE;
1631            }
1632
1633        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1634                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
1635        } else {
1636            //for non zsl streams find out the format
1637            switch (newStream->format) {
1638            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1639              {
1640                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1641                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1642
1643                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1644
1645                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
1646                     if (m_bTnrEnabled && m_bTnrVideo) {
1647                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1648                             CAM_QCOM_FEATURE_CPP_TNR;
1649                     }
1650
1651                 } else {
1652
1653                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
1654                     if (m_bTnrEnabled && m_bTnrPreview) {
1655                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1656                             CAM_QCOM_FEATURE_CPP_TNR;
1657                     }
1658                 }
1659
1660                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1661                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1662                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1663                             newStream->height;
1664                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1665                             newStream->width;
1666                 }
1667              }
1668              break;
1669           case HAL_PIXEL_FORMAT_YCbCr_420_888:
1670              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1671              if (isOnEncoder(maxViewfinderSize, newStream->width,
1672                      newStream->height)) {
1673                  if (bUseCommonFeatureMask)
1674                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1675                              commonFeatureMask;
1676                  else
1677                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1678                              CAM_QCOM_FEATURE_NONE;
1679              } else {
1680                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1681                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1682              }
1683              break;
1684           case HAL_PIXEL_FORMAT_BLOB:
1685              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1686              if (m_bIs4KVideo && !isZsl) {
1687                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
1688                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1689              } else {
1690                  if (bUseCommonFeatureMask &&
1691                          isOnEncoder(maxViewfinderSize, newStream->width,
1692                                  newStream->height)) {
1693                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1694                  } else {
1695                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1696                  }
1697              }
1698              if (isZsl) {
1699                  if (zslStream) {
1700                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1701                              (int32_t)zslStream->width;
1702                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1703                              (int32_t)zslStream->height;
1704                  } else {
1705                      ALOGE("%s: Error, No ZSL stream identified",__func__);
1706                      pthread_mutex_unlock(&mMutex);
1707                      return -EINVAL;
1708                  }
1709              } else if (m_bIs4KVideo) {
1710                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1711                          (int32_t)videoWidth;
1712                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1713                          (int32_t)videoHeight;
1714              } else if (bYuv888OverrideJpeg) {
1715                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1716                          (int32_t)largeYuv888Size.width;
1717                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1718                          (int32_t)largeYuv888Size.height;
1719              }
1720              break;
1721           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1722           case HAL_PIXEL_FORMAT_RAW16:
1723           case HAL_PIXEL_FORMAT_RAW10:
1724              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1725              isRawStreamRequested = true;
1726              break;
1727           default:
1728              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1729              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1730              break;
1731            }
1732
1733        }
1734
1735        if (newStream->priv == NULL) {
1736            //New stream, construct channel
1737            switch (newStream->stream_type) {
1738            case CAMERA3_STREAM_INPUT:
1739                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1740                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1741                break;
1742            case CAMERA3_STREAM_BIDIRECTIONAL:
1743                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1744                    GRALLOC_USAGE_HW_CAMERA_WRITE;
1745                break;
1746            case CAMERA3_STREAM_OUTPUT:
1747                /* For video encoding stream, set read/write rarely
1748                 * flag so that they may be set to un-cached */
1749                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1750                    newStream->usage |=
1751                         (GRALLOC_USAGE_SW_READ_RARELY |
1752                         GRALLOC_USAGE_SW_WRITE_RARELY |
1753                         GRALLOC_USAGE_HW_CAMERA_WRITE);
1754                else if (IS_USAGE_ZSL(newStream->usage))
1755                    CDBG("%s: ZSL usage flag skipping", __func__);
1756                else if (newStream == zslStream
1757                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1758                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
1759                } else
1760                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
1761                break;
1762            default:
1763                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
1764                break;
1765            }
1766
1767            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
1768                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
1769                QCamera3ProcessingChannel *channel = NULL;
1770                switch (newStream->format) {
1771                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1772                    if ((newStream->usage &
1773                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
1774                            (streamList->operation_mode ==
1775                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1776                    ) {
1777                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1778                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1779                                &gCamCapability[mCameraId]->padding_info,
1780                                this,
1781                                newStream,
1782                                (cam_stream_type_t)
1783                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1784                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1785                                mMetadataChannel,
1786                                0); //heap buffers are not required for HFR video channel
1787                        if (channel == NULL) {
1788                            ALOGE("%s: allocation of channel failed", __func__);
1789                            pthread_mutex_unlock(&mMutex);
1790                            return -ENOMEM;
1791                        }
1792                        //channel->getNumBuffers() will return 0 here so use
1793                        //MAX_INFLIGH_HFR_REQUESTS
1794                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
1795                        newStream->priv = channel;
1796                        ALOGI("%s: num video buffers in HFR mode: %d",
1797                                __func__, MAX_INFLIGHT_HFR_REQUESTS);
1798                    } else {
1799                        /* Copy stream contents in HFR preview only case to create
1800                         * dummy batch channel so that sensor streaming is in
1801                         * HFR mode */
1802                        if (!m_bIsVideo && (streamList->operation_mode ==
1803                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
1804                            mDummyBatchStream = *newStream;
1805                        }
1806                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1807                                mChannelHandle, mCameraHandle->ops, captureResultCb,
1808                                &gCamCapability[mCameraId]->padding_info,
1809                                this,
1810                                newStream,
1811                                (cam_stream_type_t)
1812                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1813                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1814                                mMetadataChannel,
1815                                MAX_INFLIGHT_REQUESTS);
1816                        if (channel == NULL) {
1817                            ALOGE("%s: allocation of channel failed", __func__);
1818                            pthread_mutex_unlock(&mMutex);
1819                            return -ENOMEM;
1820                        }
1821                        newStream->max_buffers = channel->getNumBuffers();
1822                        newStream->priv = channel;
1823                    }
1824                    break;
1825                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
1826                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
1827                            mChannelHandle,
1828                            mCameraHandle->ops, captureResultCb,
1829                            &gCamCapability[mCameraId]->padding_info,
1830                            this,
1831                            newStream,
1832                            (cam_stream_type_t)
1833                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1834                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1835                            mMetadataChannel);
1836                    if (channel == NULL) {
1837                        ALOGE("%s: allocation of YUV channel failed", __func__);
1838                        pthread_mutex_unlock(&mMutex);
1839                        return -ENOMEM;
1840                    }
1841                    newStream->max_buffers = channel->getNumBuffers();
1842                    newStream->priv = channel;
1843                    break;
1844                }
1845                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1846                case HAL_PIXEL_FORMAT_RAW16:
1847                case HAL_PIXEL_FORMAT_RAW10:
1848                    mRawChannel = new QCamera3RawChannel(
1849                            mCameraHandle->camera_handle, mChannelHandle,
1850                            mCameraHandle->ops, captureResultCb,
1851                            &gCamCapability[mCameraId]->padding_info,
1852                            this, newStream, CAM_QCOM_FEATURE_NONE,
1853                            mMetadataChannel,
1854                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
1855                    if (mRawChannel == NULL) {
1856                        ALOGE("%s: allocation of raw channel failed", __func__);
1857                        pthread_mutex_unlock(&mMutex);
1858                        return -ENOMEM;
1859                    }
1860                    newStream->max_buffers = mRawChannel->getNumBuffers();
1861                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
1862                    break;
1863                case HAL_PIXEL_FORMAT_BLOB:
1864                    // Max live snapshot inflight buffer is 1. This is to mitigate
1865                    // frame drop issues for video snapshot. The more buffers being
1866                    // allocated, the more frame drops there are.
1867                    mPictureChannel = new QCamera3PicChannel(
1868                            mCameraHandle->camera_handle, mChannelHandle,
1869                            mCameraHandle->ops, captureResultCb,
1870                            &gCamCapability[mCameraId]->padding_info, this, newStream,
1871                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1872                            m_bIs4KVideo, isZsl, mMetadataChannel,
1873                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
1874                    if (mPictureChannel == NULL) {
1875                        ALOGE("%s: allocation of channel failed", __func__);
1876                        pthread_mutex_unlock(&mMutex);
1877                        return -ENOMEM;
1878                    }
1879                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
1880                    newStream->max_buffers = mPictureChannel->getNumBuffers();
1881                    mPictureChannel->overrideYuvSize(
1882                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
1883                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
1884                    break;
1885
1886                default:
1887                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
1888                    pthread_mutex_unlock(&mMutex);
1889                    return -EINVAL;
1890                }
1891            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
1892                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
1893            } else {
1894                ALOGE("%s: Error, Unknown stream type", __func__);
1895                return -EINVAL;
1896            }
1897
1898            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1899                    it != mStreamInfo.end(); it++) {
1900                if ((*it)->stream == newStream) {
1901                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
1902                    break;
1903                }
1904            }
1905        } else {
1906            // Channel already exists for this stream
1907            // Do nothing for now
1908        }
1909
1910    /* Do not add entries for input stream in metastream info
1911         * since there is no real stream associated with it
1912         */
1913        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
1914            mStreamConfigInfo.num_streams++;
1915    }
1916
1917    //RAW DUMP channel
1918    if (mEnableRawDump && isRawStreamRequested == false){
1919        cam_dimension_t rawDumpSize;
1920        rawDumpSize = getMaxRawSize(mCameraId);
1921        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
1922                                  mChannelHandle,
1923                                  mCameraHandle->ops,
1924                                  rawDumpSize,
1925                                  &gCamCapability[mCameraId]->padding_info,
1926                                  this, CAM_QCOM_FEATURE_NONE);
1927        if (!mRawDumpChannel) {
1928            ALOGE("%s: Raw Dump channel cannot be created", __func__);
1929            pthread_mutex_unlock(&mMutex);
1930            return -ENOMEM;
1931        }
1932    }
1933
1934
1935    if (mAnalysisChannel) {
1936        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1937                gCamCapability[mCameraId]->analysis_recommended_res;
1938        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1939                CAM_STREAM_TYPE_ANALYSIS;
1940        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1941                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1942        mStreamConfigInfo.num_streams++;
1943    }
1944
1945    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
1946        mSupportChannel = new QCamera3SupportChannel(
1947                mCameraHandle->camera_handle,
1948                mChannelHandle,
1949                mCameraHandle->ops,
1950                &gCamCapability[mCameraId]->padding_info,
1951                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1952                CAM_STREAM_TYPE_CALLBACK,
1953                &QCamera3SupportChannel::kDim,
1954                CAM_FORMAT_YUV_420_NV21,
1955                this);
1956        if (!mSupportChannel) {
1957            ALOGE("%s: dummy channel cannot be created", __func__);
1958            pthread_mutex_unlock(&mMutex);
1959            return -ENOMEM;
1960        }
1961    }
1962
1963    if (mSupportChannel) {
1964        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1965                QCamera3SupportChannel::kDim;
1966        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1967                CAM_STREAM_TYPE_CALLBACK;
1968        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1969                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1970        mStreamConfigInfo.num_streams++;
1971    }
1972
1973    if (mRawDumpChannel) {
1974        cam_dimension_t rawSize;
1975        rawSize = getMaxRawSize(mCameraId);
1976        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
1977                rawSize;
1978        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1979                CAM_STREAM_TYPE_RAW;
1980        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1981                CAM_QCOM_FEATURE_NONE;
1982        mStreamConfigInfo.num_streams++;
1983    }
1984    /* In HFR mode, if video stream is not added, create a dummy channel so that
1985     * ISP can create a batch mode even for preview only case. This channel is
1986     * never 'start'ed (no stream-on), it is only 'initialized'  */
1987    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
1988            !m_bIsVideo) {
1989        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
1990                mChannelHandle,
1991                mCameraHandle->ops, captureResultCb,
1992                &gCamCapability[mCameraId]->padding_info,
1993                this,
1994                &mDummyBatchStream,
1995                CAM_STREAM_TYPE_VIDEO,
1996                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
1997                mMetadataChannel);
1998        if (NULL == mDummyBatchChannel) {
1999            ALOGE("%s: creation of mDummyBatchChannel failed."
2000                    "Preview will use non-hfr sensor mode ", __func__);
2001        }
2002    }
2003    if (mDummyBatchChannel) {
2004        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2005                mDummyBatchStream.width;
2006        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2007                mDummyBatchStream.height;
2008        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2009                CAM_STREAM_TYPE_VIDEO;
2010        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2011                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2012        mStreamConfigInfo.num_streams++;
2013    }
2014
2015    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2016    mStreamConfigInfo.buffer_info.max_buffers =
2017            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2018
2019    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
2020    for (pendingRequestIterator i = mPendingRequestsList.begin();
2021            i != mPendingRequestsList.end();) {
2022        i = erasePendingRequest(i);
2023    }
2024    mPendingFrameDropList.clear();
2025    // Initialize/Reset the pending buffers list
2026    mPendingBuffersMap.num_buffers = 0;
2027    mPendingBuffersMap.mPendingBufferList.clear();
2028    mPendingReprocessResultList.clear();
2029
2030    mFirstRequest = true;
2031    mCurJpegMeta.clear();
2032    //Get min frame duration for this streams configuration
2033    deriveMinFrameDuration();
2034
2035    /* Turn on video hint only if video stream is configured */
2036
2037    pthread_mutex_unlock(&mMutex);
2038
2039    return rc;
2040}
2041
2042/*===========================================================================
2043 * FUNCTION   : validateCaptureRequest
2044 *
2045 * DESCRIPTION: validate a capture request from camera service
2046 *
2047 * PARAMETERS :
2048 *   @request : request from framework to process
2049 *
2050 * RETURN     :
2051 *
2052 *==========================================================================*/
2053int QCamera3HardwareInterface::validateCaptureRequest(
2054                    camera3_capture_request_t *request)
2055{
2056    ssize_t idx = 0;
2057    const camera3_stream_buffer_t *b;
2058    CameraMetadata meta;
2059
2060    /* Sanity check the request */
2061    if (request == NULL) {
2062        ALOGE("%s: NULL capture request", __func__);
2063        return BAD_VALUE;
2064    }
2065
2066    if (request->settings == NULL && mFirstRequest) {
2067        /*settings cannot be null for the first request*/
2068        return BAD_VALUE;
2069    }
2070
2071    uint32_t frameNumber = request->frame_number;
2072    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2073        ALOGE("%s: Request %d: No output buffers provided!",
2074                __FUNCTION__, frameNumber);
2075        return BAD_VALUE;
2076    }
2077    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2078        ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
2079                __func__, request->num_output_buffers, MAX_NUM_STREAMS);
2080        return BAD_VALUE;
2081    }
2082    if (request->input_buffer != NULL) {
2083        b = request->input_buffer;
2084        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2085            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2086                    __func__, frameNumber, (long)idx);
2087            return BAD_VALUE;
2088        }
2089        if (b->release_fence != -1) {
2090            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2091                    __func__, frameNumber, (long)idx);
2092            return BAD_VALUE;
2093        }
2094        if (b->buffer == NULL) {
2095            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2096                    __func__, frameNumber, (long)idx);
2097            return BAD_VALUE;
2098        }
2099    }
2100
2101    // Validate all buffers
2102    b = request->output_buffers;
2103    do {
2104        QCamera3ProcessingChannel *channel =
2105                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2106        if (channel == NULL) {
2107            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
2108                    __func__, frameNumber, (long)idx);
2109            return BAD_VALUE;
2110        }
2111        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2112            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
2113                    __func__, frameNumber, (long)idx);
2114            return BAD_VALUE;
2115        }
2116        if (b->release_fence != -1) {
2117            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
2118                    __func__, frameNumber, (long)idx);
2119            return BAD_VALUE;
2120        }
2121        if (b->buffer == NULL) {
2122            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
2123                    __func__, frameNumber, (long)idx);
2124            return BAD_VALUE;
2125        }
2126        if (*(b->buffer) == NULL) {
2127            ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
2128                    __func__, frameNumber, (long)idx);
2129            return BAD_VALUE;
2130        }
2131        idx++;
2132        b = request->output_buffers + idx;
2133    } while (idx < (ssize_t)request->num_output_buffers);
2134
2135    return NO_ERROR;
2136}
2137
2138/*===========================================================================
2139 * FUNCTION   : deriveMinFrameDuration
2140 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
 *              on currently configured streams.
2143 *
2144 * PARAMETERS : NONE
2145 *
2146 * RETURN     : NONE
2147 *
2148 *==========================================================================*/
2149void QCamera3HardwareInterface::deriveMinFrameDuration()
2150{
2151    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2152
2153    maxJpegDim = 0;
2154    maxProcessedDim = 0;
2155    maxRawDim = 0;
2156
2157    // Figure out maximum jpeg, processed, and raw dimensions
2158    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2159        it != mStreamInfo.end(); it++) {
2160
2161        // Input stream doesn't have valid stream_type
2162        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2163            continue;
2164
2165        int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2166        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2167            if (dimension > maxJpegDim)
2168                maxJpegDim = dimension;
2169        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2170                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2171                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2172            if (dimension > maxRawDim)
2173                maxRawDim = dimension;
2174        } else {
2175            if (dimension > maxProcessedDim)
2176                maxProcessedDim = dimension;
2177        }
2178    }
2179
2180    size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2181            MAX_SIZES_CNT);
2182
2183    //Assume all jpeg dimensions are in processed dimensions.
2184    if (maxJpegDim > maxProcessedDim)
2185        maxProcessedDim = maxJpegDim;
2186    //Find the smallest raw dimension that is greater or equal to jpeg dimension
2187    if (maxProcessedDim > maxRawDim) {
2188        maxRawDim = INT32_MAX;
2189
2190        for (size_t i = 0; i < count; i++) {
2191            int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2192                    gCamCapability[mCameraId]->raw_dim[i].height;
2193            if (dimension >= maxProcessedDim && dimension < maxRawDim)
2194                maxRawDim = dimension;
2195        }
2196    }
2197
2198    //Find minimum durations for processed, jpeg, and raw
2199    for (size_t i = 0; i < count; i++) {
2200        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2201                gCamCapability[mCameraId]->raw_dim[i].height) {
2202            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2203            break;
2204        }
2205    }
2206    count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2207    for (size_t i = 0; i < count; i++) {
2208        if (maxProcessedDim ==
2209                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2210                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2211            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2212            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2213            break;
2214        }
2215    }
2216}
2217
2218/*===========================================================================
2219 * FUNCTION   : getMinFrameDuration
2220 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2227 *
2228 *==========================================================================*/
2229int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2230{
2231    bool hasJpegStream = false;
2232    bool hasRawStream = false;
2233    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2234        const camera3_stream_t *stream = request->output_buffers[i].stream;
2235        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2236            hasJpegStream = true;
2237        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2238                stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2239                stream->format == HAL_PIXEL_FORMAT_RAW16)
2240            hasRawStream = true;
2241    }
2242
2243    if (!hasJpegStream)
2244        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2245    else
2246        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2247}
2248
2249/*===========================================================================
2250 * FUNCTION   : handlePendingReprocResults
2251 *
2252 * DESCRIPTION: check and notify on any pending reprocess results
2253 *
2254 * PARAMETERS :
2255 *   @frame_number   : Pending request frame number
2256 *
2257 * RETURN     : int32_t type of status
2258 *              NO_ERROR  -- success
 *              non-zero failure code
2260 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Deliver at most one deferred reprocess result matching frame_number:
    // fire its saved notify message, emit its capture result, then drop both
    // the pending-reprocess entry and the matching pending request.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Send the notify (shutter) message that was held back until the
            // reprocess output became available.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so its cached settings and
            // input buffer can be returned with the single output buffer.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // Reprocess results are delivered complete in one shot.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Remove the satisfied request; break before the erased
                    // iterator could be advanced.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Remove the delivered reprocess entry; break immediately since
            // the iterator j is now invalid.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2299
2300/*===========================================================================
2301 * FUNCTION   : handleBatchMetadata
2302 *
2303 * DESCRIPTION: Handles metadata buffer callback in batch mode
2304 *
2305 * PARAMETERS : @metadata_buf: metadata buffer
2306 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2307 *                 the meta buf in this method
2308 *
2309 * RETURN     :
2310 *
2311 *==========================================================================*/
2312void QCamera3HardwareInterface::handleBatchMetadata(
2313        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2314{
2315    ATRACE_CALL();
2316
2317    if (NULL == metadata_buf) {
2318        ALOGE("%s: metadata_buf is NULL", __func__);
2319        return;
2320    }
2321    /* In batch mode, the metdata will contain the frame number and timestamp of
2322     * the last frame in the batch. Eg: a batch containing buffers from request
2323     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2324     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
2325     * multiple process_capture_results */
2326    metadata_buffer_t *metadata =
2327            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2328    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2329    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2330    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2331    uint32_t frame_number = 0, urgent_frame_number = 0;
2332    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2333    bool invalid_metadata = false;
2334    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2335    size_t loopCount = 1;
2336
2337    int32_t *p_frame_number_valid =
2338            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2339    uint32_t *p_frame_number =
2340            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2341    int64_t *p_capture_time =
2342            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2343    int32_t *p_urgent_frame_number_valid =
2344            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2345    uint32_t *p_urgent_frame_number =
2346            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2347
2348    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2349            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2350            (NULL == p_urgent_frame_number)) {
2351        ALOGE("%s: Invalid metadata", __func__);
2352        invalid_metadata = true;
2353    } else {
2354        frame_number_valid = *p_frame_number_valid;
2355        last_frame_number = *p_frame_number;
2356        last_frame_capture_time = *p_capture_time;
2357        urgent_frame_number_valid = *p_urgent_frame_number_valid;
2358        last_urgent_frame_number = *p_urgent_frame_number;
2359    }
2360
2361    /* In batchmode, when no video buffers are requested, set_parms are sent
2362     * for every capture_request. The difference between consecutive urgent
2363     * frame numbers and frame numbers should be used to interpolate the
2364     * corresponding frame numbers and time stamps */
2365    pthread_mutex_lock(&mMutex);
2366    if (urgent_frame_number_valid) {
2367        first_urgent_frame_number =
2368                mPendingBatchMap.valueFor(last_urgent_frame_number);
2369        urgentFrameNumDiff = last_urgent_frame_number + 1 -
2370                first_urgent_frame_number;
2371
2372        CDBG_HIGH("%s: urgent_frm: valid: %d frm_num: %d - %d",
2373                __func__, urgent_frame_number_valid,
2374                first_urgent_frame_number, last_urgent_frame_number);
2375    }
2376
2377    if (frame_number_valid) {
2378        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
2379        frameNumDiff = last_frame_number + 1 -
2380                first_frame_number;
2381        mPendingBatchMap.removeItem(last_frame_number);
2382
2383        CDBG_HIGH("%s:        frm: valid: %d frm_num: %d - %d",
2384                __func__, frame_number_valid,
2385                first_frame_number, last_frame_number);
2386
2387    }
2388    pthread_mutex_unlock(&mMutex);
2389
2390    if (urgent_frame_number_valid || frame_number_valid) {
2391        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2392        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2393            ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
2394                    __func__, urgentFrameNumDiff, last_urgent_frame_number);
2395        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2396            ALOGE("%s: frameNumDiff: %d frameNum: %d",
2397                    __func__, frameNumDiff, last_frame_number);
2398    }
2399
2400    for (size_t i = 0; i < loopCount; i++) {
2401        /* handleMetadataWithLock is called even for invalid_metadata for
2402         * pipeline depth calculation */
2403        if (!invalid_metadata) {
2404            /* Infer frame number. Batch metadata contains frame number of the
2405             * last frame */
2406            if (urgent_frame_number_valid) {
2407                if (i < urgentFrameNumDiff) {
2408                    urgent_frame_number =
2409                            first_urgent_frame_number + i;
2410                    CDBG("%s: inferred urgent frame_number: %d",
2411                            __func__, urgent_frame_number);
2412                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2413                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2414                } else {
2415                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2416                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2417                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2418                }
2419            }
2420
2421            /* Infer frame number. Batch metadata contains frame number of the
2422             * last frame */
2423            if (frame_number_valid) {
2424                if (i < frameNumDiff) {
2425                    frame_number = first_frame_number + i;
2426                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
2427                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2428                            CAM_INTF_META_FRAME_NUMBER, frame_number);
2429                } else {
2430                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2431                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2432                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2433                }
2434            }
2435
2436            if (last_frame_capture_time) {
2437                //Infer timestamp
2438                first_frame_capture_time = last_frame_capture_time -
2439                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
2440                capture_time =
2441                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
2442                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2443                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2444                CDBG_HIGH("%s: batch capture_time: %lld, capture_time: %lld",
2445                        __func__, last_frame_capture_time, capture_time);
2446            }
2447        }
2448        pthread_mutex_lock(&mMutex);
2449        handleMetadataWithLock(metadata_buf,
2450                false /* free_and_bufdone_meta_buf */,
2451                (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
2452                (i == frameNumDiff-1) /* last metadata in the batch metadata */);
2453        pthread_mutex_unlock(&mMutex);
2454    }
2455
2456done_batch_metadata:
2457    /* BufDone metadata buffer */
2458    if (free_and_bufdone_meta_buf) {
2459        mMetadataChannel->bufDone(metadata_buf);
2460        free(metadata_buf);
2461    }
2462}
2463
2464/*===========================================================================
2465 * FUNCTION   : handleMetadataWithLock
2466 *
2467 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2468 *
2469 * PARAMETERS : @metadata_buf: metadata buffer
2470 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2471 *                 the meta buf in this method
2472 *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
2473 *                  last urgent metadata in a batch. Always true for non-batch mode
2474 *              @lastMetadataInBatch: Boolean to indicate whether this is the
2475 *                  last metadata in a batch. Always true for non-batch mode
2476 *
2477 * RETURN     :
2478 *
2479 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool lastUrgentMetadataInBatch, bool lastMetadataInBatch)
{
    ATRACE_CALL();

    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    // Set when an earlier pending request is still waiting on dynamic black
    // level data; later requests then hold back their final partial result.
    bool unfinished_raw_request = false;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // NOTE(review): this debug log dereferences p_frame_number_valid and
    // p_frame_number BEFORE the NULL check below — with CDBG enabled and
    // invalid metadata this would crash. Consider moving it after the check.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        // Even for invalid metadata, pipeline depth is still bumped below.
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
          __func__, urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);

            // Any older non-reprocess request with no partial result yet has
            // missed its urgent metadata; bump its count so accounting of
            // PARTIAL_RESULT_COUNT stays consistent.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
                i->partial_result_cnt++;
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result = translateCbUrgentMetadataToResultMetadata(
                        metadata, lastUrgentMetadataInBatch);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                     __func__, result.frame_number, capture_time);
                // translateCbUrgentMetadataToResultMetadata allocated this.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    // A metadata buffer without a valid frame number only marks start of
    // frame; release it (if owned) and just update pipeline depths.
    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                    if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        ALOGW("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        ALOGW("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
                                __func__, i->frame_number, streamID, j->stream->format);
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        // so the buffer is flagged STATUS_ERROR when returned.
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
               }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else if (i->need_dynamic_blklvl) {
                // Raw black-level data still pending; keep the entry and
                // defer final results of subsequent requests.
                unfinished_raw_request = true;
                // i->partial_result_cnt--;
                CDBG("%s, frame number:%d, partial_result:%d, unfinished raw request..",
                        __func__, i->frame_number, i->partial_result_cnt);
                i++;
                continue;
            } else if (i->pending_extra_result) {
                CDBG("%s, frame_number:%d, partial_result:%d, need_dynamic_blklvl:%d",
                        __func__, i->frame_number, i->partial_result_cnt,
                        i->need_dynamic_blklvl);
                // i->partial_result_cnt--;
                i++;
                continue;
            } else {
                // Genuinely missed metadata: report ERROR_RESULT with a dummy
                // metadata carrying only the request id.
                ALOGE("%s: Missing metadata buffer for frame number %d, reporting CAMERA3_MSG_ERROR_RESULT",
                     __func__, i->frame_number);

                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(notify_msg));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
                notify_msg.message.error.error_stream = NULL;
                notify_msg.message.error.frame_number = i->frame_number;
                mCallbackOps->notify(mCallbackOps, &notify_msg);

                // partial_result should be PARTIAL_RESULT_CNT in case of
                // ERROR_RESULT.
                i->partial_result_cnt = PARTIAL_RESULT_COUNT;
                result.partial_result = PARTIAL_RESULT_COUNT;
            }
        } else {
            // i->frame_number == frame_number: this is the request the
            // metadata belongs to.
            i->partial_result_cnt++;
            CDBG("%s, frame_number:%d, need_dynamic_blklvl:%d, partial cnt:%d\n",
                    __func__, i->frame_number, i->need_dynamic_blklvl,
                    i->partial_result_cnt);
            if (!i->need_dynamic_blklvl) {
                CDBG("%s, meta for request without raw, frame number: %d\n",
                        __func__, i->frame_number);
                if (!unfinished_raw_request) {
                    // No raw data outstanding: this partial result is final.
                    i->partial_result_cnt++;
                    CDBG("%s, no raw request pending, send the final (cnt:%d) partial result",
                            __func__, i->partial_result_cnt);
                }
            }

            result.partial_result = i->partial_result_cnt;

            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Channel takes over the metadata buffer; do not bufDone
                    // it below (see !internalPproc check).
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, i->hybrid_ae_enable, internalPproc, i->need_dynamic_blklvl,
                    lastMetadataInBatch);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                CDBG("%s: couldn't find need_metadata for this metadata", __func__);
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count buffers already filled for this request; they ride along
        // with this metadata result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer STATUS_ERROR if its frame was recorded
                    // as dropped earlier.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                  __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Drop this buffer from the pending-buffer bookkeeping.
                    for (List<PendingBufferInfo>::iterator k =
                      mPendingBuffersMap.mPendingBufferList.begin();
                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                      if (k->buffer == j->buffer->buffer) {
                        CDBG("%s: Found buffer %p in pending buffer List "
                              "for frame %u, Take it out!!", __func__,
                               k->buffer, k->frame_number);
                        mPendingBuffersMap.num_buffers--;
                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
                        break;
                      }
                    }

                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
                    __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // Metadata-only result (buffers will follow separately).
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
                        __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        // Retire the request only once all partial results have been sent;
        // otherwise keep it and flag that an extra result is still expected.
        if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
            mPendingLiveRequest--;
            i = erasePendingRequest(i);
        } else {
            CDBG("%s, keep in list, frame number:%d, partial result:%d",
                    __func__, i->frame_number, i->partial_result_cnt);
            i->pending_extra_result = true;
            i++;
        }

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }

    }

done_metadata:
    // Every request still pending has advanced one more stage through the
    // pipeline, regardless of whether this metadata was valid.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
    unblockRequestIfNecessary();

}
2835
2836/*===========================================================================
2837 * FUNCTION   : hdrPlusPerfLock
2838 *
2839 * DESCRIPTION: perf lock for HDR+ using custom intent
2840 *
2841 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
2842 *
2843 * RETURN     : None
2844 *
2845 *==========================================================================*/
2846void QCamera3HardwareInterface::hdrPlusPerfLock(
2847        mm_camera_super_buf_t *metadata_buf)
2848{
2849    if (NULL == metadata_buf) {
2850        ALOGE("%s: metadata_buf is NULL", __func__);
2851        return;
2852    }
2853    metadata_buffer_t *metadata =
2854            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2855    int32_t *p_frame_number_valid =
2856            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2857    uint32_t *p_frame_number =
2858            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2859
2860    //acquire perf lock for 5 sec after the last HDR frame is captured
2861    if (*p_frame_number_valid) {
2862        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
2863            m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
2864        }
2865    }
2866
2867    //release lock after perf lock timer is expired. If lock is already released,
2868    //isTimerReset returns false
2869    if (m_perfLock.isTimerReset()) {
2870        mLastCustIntentFrmNum = -1;
2871        m_perfLock.lock_rel_timed();
2872    }
2873}
2874
2875/*===========================================================================
2876 * FUNCTION   : handleInputBufferWithLock
2877 *
2878 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
2879 *
2880 * PARAMETERS :
2881 *  @buffer: contains status information about the processed buffer
2882 *  @frame_number: frame number of the input buffer
2883 *
2884 * RETURN     :
2885 *
2886 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // Find the pending (reprocess) request entry carrying this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notify exactly once per request. Prefer the
        // timestamp carried in the input settings (ANDROID_SENSOR_TIMESTAMP);
        // fall back to the current monotonic time when the settings are
        // missing or carry no timestamp.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on (and close) the input buffer's release fence before
        // returning the result to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
           }
        }

        // Error path: the processed buffer came back with a non-OK status.
        // Notify a request error, then hand back every output buffer still
        // pending on this frame number with STATUS_ERROR so the framework
        // can reclaim them.
        if ((nullptr != buffer) && (CAMERA3_BUFFER_STATUS_OK != buffer->status)) {
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            // Collect all buffers registered under this frame number and
            // drop them from the pending-buffers book-keeping as we go.
            Vector<camera3_stream_buffer_t> pendingBuffers;
            camera3_stream_buffer_t pending;
            memset(&pending, 0, sizeof(pending));
            pending.acquire_fence = -1;
            pending.release_fence = -1;
            pending.status = CAMERA3_BUFFER_STATUS_ERROR;
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end();) {
                if (k->frame_number == frame_number) {
                    pending.buffer = k->buffer;
                    pending.stream = k->stream;
                    pendingBuffers.add(pending);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                } else {
                    k++;
                }
            }

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = pendingBuffers.size();
            result.output_buffers = pendingBuffers.array();
            result.result = NULL;
            result.frame_number = frame_number;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Success path: return the settings and input buffer as the
            // final result for this frame.
            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;

            // Final result: no further partial metadata will follow.
            result.partial_result = PARTIAL_RESULT_COUNT;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
        }
        CDBG("%s: Input request metadata and input buffer frame_number = %u",
                       __func__, i->frame_number);
        // Remove the completed request from the pending list.
        i = erasePendingRequest(i);
    } else {
        ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
    }
}
2987
2988bool QCamera3HardwareInterface::getBlackLevelRegion(int (&opticalBlackRegions)[4])
2989{
2990    if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
2991        /*just calculate one region black level and send to fwk*/
2992        for (size_t i = 0; i <  4; i++) {
2993            opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
2994        }
2995        return TRUE;
2996    }
2997
2998    return FALSE;
2999}
3000
/*===========================================================================
 * FUNCTION   : sendDynamicBlackLevel
 *
 * DESCRIPTION: Thread-safe wrapper: acquires mMutex and forwards the dynamic
 *              black level values to sendDynamicBlackLevelWithLock.
 *
 * PARAMETERS : @blacklevel: per-channel dynamic black level values (4 entries)
 *              @frame_number: frame number the black level applies to
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::sendDynamicBlackLevel(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E.\n", __func__);
    pthread_mutex_lock(&mMutex);
    sendDynamicBlackLevelWithLock(blacklevel, frame_number);
    pthread_mutex_unlock(&mMutex);
    CDBG("%s, X.\n", __func__);
}
3009
/*===========================================================================
 * FUNCTION   : sendDynamicBlackLevelWithLock
 *
 * DESCRIPTION: Sends a partial metadata result carrying the dynamic black
 *              level for the given frame, then drains any subsequent pending
 *              requests that were only waiting on this one. Must be called
 *              with mMutex held.
 *
 * PARAMETERS : @blacklevel: per-channel dynamic black level values (4 entries)
 *              @frame_number: frame number the black level applies to
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::sendDynamicBlackLevelWithLock(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E. frame_number:%d\n", __func__, frame_number);

    // Locate the pending request; it must exist and have requested a
    // dynamic black level, otherwise this callback is unexpected.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if ((i == mPendingRequestsList.end()) || !i->need_dynamic_blklvl) {
        ALOGE("%s, error: invalid frame number.", __func__);
        return;
    }

    // This result counts as one more partial result for the request.
    i->partial_result_cnt++;

    CameraMetadata camMetadata;
    int64_t fwk_frame_number = (int64_t)frame_number;
    camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

    // update dynamic black level here
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, blacklevel, 4);

    // Build a metadata-only capture result (no output buffers).
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));
    result.frame_number = frame_number;
    result.num_output_buffers = 0;
    result.result = camMetadata.release();
    result.partial_result = i->partial_result_cnt;

    CDBG("%s, partial result:%d, frame_number:%d, pending extra result:%d\n",
            __func__, result.partial_result, frame_number, i->pending_extra_result);
    mCallbackOps->process_capture_result(mCallbackOps, &result);
    // Ownership of result.result was transferred out of camMetadata above,
    // so it must be freed explicitly once the callback returns.
    free_camera_metadata((camera_metadata_t *)result.result);

    if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
        CDBG("%s, remove cur request from pending list.", __func__);
        mPendingLiveRequest--;
        i = erasePendingRequest(i);

        // traverse the remaining pending list to see whether need to send cached ones..
        // Later requests may have been held back (one partial result short)
        // solely because this frame's black level had not arrived; flush
        // them now, stopping at the first request still genuinely pending.
        while (i != mPendingRequestsList.end()) {
            CDBG("%s, frame number:%d, partial_result:%d, pending extra result:%d",
                    __func__, i->frame_number, i->partial_result_cnt,
                    i->pending_extra_result);

            if ((i->partial_result_cnt == PARTIAL_RESULT_COUNT - 1)
                    && (i->need_dynamic_blklvl == false) /* in case two consecutive raw requests */) {
                // send out final result, and remove it from pending list.
                CameraMetadata camMetadata;
                int64_t fwk_frame_number = (int64_t)i->frame_number;
                camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

                memset(&result, 0, sizeof(camera3_capture_result_t));
                result.frame_number = i->frame_number;
                result.num_output_buffers = 0;
                result.result = camMetadata.release();
                result.partial_result = i->partial_result_cnt + 1;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                free_camera_metadata((camera_metadata_t *)result.result);

                mPendingLiveRequest--;
                i = erasePendingRequest(i);
                // NOTE(review): %d is used for mPendingRequestsList.size(),
                // which is presumably an unsigned/size type — confirm the
                // format specifier against the List implementation.
                CDBG("%s, mPendingLiveRequest:%d, pending list size:%d",
                        __func__, mPendingLiveRequest, mPendingRequestsList.size());
            } else {
                break;
            }
        }
    }

    // A request slot may have been freed; wake any blocked capture request.
    unblockRequestIfNecessary();
    CDBG("%s, X.mPendingLiveRequest = %d\n", __func__, mPendingLiveRequest);
}
3084
3085
3086/*===========================================================================
3087 * FUNCTION   : handleBufferWithLock
3088 *
3089 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3090 *
3091 * PARAMETERS : @buffer: image buffer for the callback
3092 *              @frame_number: frame number of the image buffer
3093 *
3094 * RETURN     :
3095 *
3096 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end() || i->pending_extra_result == true) {
        if (i != mPendingRequestsList.end()) {
            // though the pendingRequestInfo is still in the list,
            // still send the buffer directly, as the pending_extra_result is true,
            // and we've already received meta for this frame number.
            CDBG("%s, send the buffer directly, frame number:%d",
                    __func__, i->frame_number);
        }
        // Verify all pending requests frame_numbers are greater
        // (a smaller pending live frame number indicates out-of-order
        // completion, which is only logged, not corrected).
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                ALOGE("%s: Error: pending live frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Build a buffer-only result (metadata was already delivered).
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as a frame drop, mark the
        // buffer STATUS_ERROR and clear the drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Drop the returned buffer from the pending-buffers book-keeping.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: prepare the shutter notification using the
            // timestamp from the input settings when available, otherwise
            // the current monotonic time.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on (and close) the input buffer's release fence before
            // delivering any result.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
               }
            }

            // Drop the returned buffer from the pending-buffers book-keeping.
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

            // Only notify now if no earlier frame is still pending;
            // otherwise results would be delivered out of order.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                // Deliver shutter + complete result (settings, input and
                // output buffer) and retire the request.
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (flushed by handlePendingReprocResults once earlier
                // frames complete).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Normal request whose metadata has not arrived yet: cache a
            // copy of the buffer on the matching stream entry so it can be
            // sent together with the metadata later. The malloc'd copy is
            // freed once the result is dispatched.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
3258
3259/*===========================================================================
3260 * FUNCTION   : unblockRequestIfNecessary
3261 *
3262 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3263 *              that mMutex is held when this function is called.
3264 *
3265 * PARAMETERS :
3266 *
3267 * RETURN     :
3268 *
3269 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // NOTE(review): caller is expected to hold mMutex (per the function
   // header above); the waiter presumably re-checks its predicate, making
   // an extra signal harmless — confirm against the wait site.
   pthread_cond_signal(&mRequestCond);
}
3275
3276
3277/*===========================================================================
3278 * FUNCTION   : processCaptureRequest
3279 *
3280 * DESCRIPTION: process a capture request from camera service
3281 *
3282 * PARAMETERS :
3283 *   @request : request from framework to process
3284 *
3285 * RETURN     :
3286 *
3287 *==========================================================================*/
3288int QCamera3HardwareInterface::processCaptureRequest(
3289                    camera3_capture_request_t *request)
3290{
3291    ATRACE_CALL();
3292    int rc = NO_ERROR;
3293    int32_t request_id;
3294    CameraMetadata meta;
3295    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
3296    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
3297    bool isVidBufRequested = false;
3298    camera3_stream_buffer_t *pInputBuffer = NULL;
3299
3300    pthread_mutex_lock(&mMutex);
3301
3302    rc = validateCaptureRequest(request);
3303    if (rc != NO_ERROR) {
3304        ALOGE("%s: incoming request is not valid", __func__);
3305        pthread_mutex_unlock(&mMutex);
3306        return rc;
3307    }
3308
3309    meta = request->settings;
3310
3311    // For first capture request, send capture intent, and
3312    // stream on all streams
3313    if (mFirstRequest) {
3314        // send an unconfigure to the backend so that the isp
3315        // resources are deallocated
3316        if (!mFirstConfiguration) {
3317            cam_stream_size_info_t stream_config_info;
3318            int32_t hal_version = CAM_HAL_V3;
3319            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3320            stream_config_info.buffer_info.min_buffers =
3321                    MIN_INFLIGHT_REQUESTS;
3322            stream_config_info.buffer_info.max_buffers =
3323                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3324            clear_metadata_buffer(mParameters);
3325            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3326                    CAM_INTF_PARM_HAL_VERSION, hal_version);
3327            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3328                    CAM_INTF_META_STREAM_INFO, stream_config_info);
3329            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3330                    mParameters);
3331            if (rc < 0) {
3332                ALOGE("%s: set_parms for unconfigure failed", __func__);
3333                pthread_mutex_unlock(&mMutex);
3334                return rc;
3335            }
3336        }
3337        m_perfLock.lock_acq();
3338        /* get eis information for stream configuration */
3339        cam_is_type_t is_type;
3340        char is_type_value[PROPERTY_VALUE_MAX];
3341        property_get("persist.camera.is_type", is_type_value, "0");
3342        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
3343
3344        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3345            int32_t hal_version = CAM_HAL_V3;
3346            uint8_t captureIntent =
3347                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3348            mCaptureIntent = captureIntent;
3349            clear_metadata_buffer(mParameters);
3350            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3351            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3352        }
3353
3354        //If EIS is enabled, turn it on for video
3355        bool setEis = m_bEisEnable && m_bEisSupportedSize;
3356        int32_t vsMode;
3357        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3358        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3359            rc = BAD_VALUE;
3360        }
3361
3362        //IS type will be 0 unless EIS is supported. If EIS is supported
3363        //it could either be 1 or 4 depending on the stream and video size
3364        if (setEis) {
3365            if (!m_bEisSupportedSize) {
3366                is_type = IS_TYPE_DIS;
3367            } else {
3368                is_type = IS_TYPE_EIS_2_0;
3369            }
3370            mStreamConfigInfo.is_type = is_type;
3371        } else {
3372            mStreamConfigInfo.is_type = IS_TYPE_NONE;
3373        }
3374
3375        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3376                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3377        int32_t tintless_value = 1;
3378        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3379                CAM_INTF_PARM_TINTLESS, tintless_value);
3380        //Disable CDS for HFR mode and if mPprocBypass = true.
3381        //CDS is a session parameter in the backend/ISP, so need to be set/reset
3382        //after every configure_stream
3383        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3384                (m_bIsVideo)) {
3385            int32_t cds = CAM_CDS_MODE_OFF;
3386            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3387                    CAM_INTF_PARM_CDS_MODE, cds))
3388                ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
3389
3390        }
3391        setMobicat();
3392
3393        /* Set fps and hfr mode while sending meta stream info so that sensor
3394         * can configure appropriate streaming mode */
3395        mHFRVideoFps = DEFAULT_VIDEO_FPS;
3396        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3397            rc = setHalFpsRange(meta, mParameters);
3398            if (rc != NO_ERROR) {
3399                ALOGE("%s: setHalFpsRange failed", __func__);
3400            }
3401        }
3402        if (meta.exists(ANDROID_CONTROL_MODE)) {
3403            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3404            rc = extractSceneMode(meta, metaMode, mParameters);
3405            if (rc != NO_ERROR) {
3406                ALOGE("%s: extractSceneMode failed", __func__);
3407            }
3408        }
3409
3410        //TODO: validate the arguments, HSV scenemode should have only the
3411        //advertised fps ranges
3412
3413        /*set the capture intent, hal version, tintless, stream info,
3414         *and disenable parameters to the backend*/
3415        CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
3416        mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3417                    mParameters);
3418
3419        cam_dimension_t sensor_dim;
3420        memset(&sensor_dim, 0, sizeof(sensor_dim));
3421        rc = getSensorOutputSize(sensor_dim);
3422        if (rc != NO_ERROR) {
3423            ALOGE("%s: Failed to get sensor output size", __func__);
3424            pthread_mutex_unlock(&mMutex);
3425            goto error_exit;
3426        }
3427
3428        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3429                gCamCapability[mCameraId]->active_array_size.height,
3430                sensor_dim.width, sensor_dim.height);
3431
3432        /* Set batchmode before initializing channel. Since registerBuffer
3433         * internally initializes some of the channels, better set batchmode
3434         * even before first register buffer */
3435        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3436            it != mStreamInfo.end(); it++) {
3437            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3438            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3439                    && mBatchSize) {
3440                rc = channel->setBatchSize(mBatchSize);
3441                //Disable per frame map unmap for HFR/batchmode case
3442                rc |= channel->setPerFrameMapUnmap(false);
3443                if (NO_ERROR != rc) {
3444                    ALOGE("%s : Channel init failed %d", __func__, rc);
3445                    pthread_mutex_unlock(&mMutex);
3446                    goto error_exit;
3447                }
3448            }
3449        }
3450
3451        //First initialize all streams
3452        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3453            it != mStreamInfo.end(); it++) {
3454            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3455            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3456               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
3457               setEis)
3458                rc = channel->initialize(is_type);
3459            else {
3460                rc = channel->initialize(IS_TYPE_NONE);
3461            }
3462            if (NO_ERROR != rc) {
3463                ALOGE("%s : Channel initialization failed %d", __func__, rc);
3464                pthread_mutex_unlock(&mMutex);
3465                goto error_exit;
3466            }
3467        }
3468
3469        if (mRawDumpChannel) {
3470            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3471            if (rc != NO_ERROR) {
3472                ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
3473                pthread_mutex_unlock(&mMutex);
3474                goto error_exit;
3475            }
3476        }
3477        if (mSupportChannel) {
3478            rc = mSupportChannel->initialize(IS_TYPE_NONE);
3479            if (rc < 0) {
3480                ALOGE("%s: Support channel initialization failed", __func__);
3481                pthread_mutex_unlock(&mMutex);
3482                goto error_exit;
3483            }
3484        }
3485        if (mAnalysisChannel) {
3486            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3487            if (rc < 0) {
3488                ALOGE("%s: Analysis channel initialization failed", __func__);
3489                pthread_mutex_unlock(&mMutex);
3490                goto error_exit;
3491            }
3492        }
3493        if (mDummyBatchChannel) {
3494            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3495            if (rc < 0) {
3496                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
3497                pthread_mutex_unlock(&mMutex);
3498                goto error_exit;
3499            }
3500            rc = mDummyBatchChannel->initialize(is_type);
3501            if (rc < 0) {
3502                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
3503                pthread_mutex_unlock(&mMutex);
3504                goto error_exit;
3505            }
3506        }
3507
3508        // Set bundle info
3509        rc = setBundleInfo();
3510        if (rc < 0) {
3511            ALOGE("%s: setBundleInfo failed %d", __func__, rc);
3512            pthread_mutex_unlock(&mMutex);
3513            goto error_exit;
3514        }
3515
3516        //Then start them.
3517        CDBG_HIGH("%s: Start META Channel", __func__);
3518        rc = mMetadataChannel->start();
3519        if (rc < 0) {
3520            ALOGE("%s: META channel start failed", __func__);
3521            pthread_mutex_unlock(&mMutex);
3522            goto error_exit;
3523        }
3524
3525        if (mAnalysisChannel) {
3526            rc = mAnalysisChannel->start();
3527            if (rc < 0) {
3528                ALOGE("%s: Analysis channel start failed", __func__);
3529                mMetadataChannel->stop();
3530                pthread_mutex_unlock(&mMutex);
3531                goto error_exit;
3532            }
3533        }
3534
3535        if (mSupportChannel) {
3536            rc = mSupportChannel->start();
3537            if (rc < 0) {
3538                ALOGE("%s: Support channel start failed", __func__);
3539                mMetadataChannel->stop();
3540                /* Although support and analysis are mutually exclusive today
3541                   adding it in anycase for future proofing */
3542                if (mAnalysisChannel) {
3543                    mAnalysisChannel->stop();
3544                }
3545                pthread_mutex_unlock(&mMutex);
3546                goto error_exit;
3547            }
3548        }
3549        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3550            it != mStreamInfo.end(); it++) {
3551            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3552            CDBG_HIGH("%s: Start Processing Channel mask=%d",
3553                    __func__, channel->getStreamTypeMask());
3554            rc = channel->start();
3555            if (rc < 0) {
3556                ALOGE("%s: channel start failed", __func__);
3557                pthread_mutex_unlock(&mMutex);
3558                goto error_exit;
3559            }
3560        }
3561
3562        if (mRawDumpChannel) {
3563            CDBG("%s: Starting raw dump stream",__func__);
3564            rc = mRawDumpChannel->start();
3565            if (rc != NO_ERROR) {
3566                ALOGE("%s: Error Starting Raw Dump Channel", __func__);
3567                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3568                      it != mStreamInfo.end(); it++) {
3569                    QCamera3Channel *channel =
3570                        (QCamera3Channel *)(*it)->stream->priv;
3571                    ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
3572                        channel->getStreamTypeMask());
3573                    channel->stop();
3574                }
3575                if (mSupportChannel)
3576                    mSupportChannel->stop();
3577                if (mAnalysisChannel) {
3578                    mAnalysisChannel->stop();
3579                }
3580                mMetadataChannel->stop();
3581                pthread_mutex_unlock(&mMutex);
3582                goto error_exit;
3583            }
3584        }
3585
3586        if (mChannelHandle) {
3587
3588            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
3589                    mChannelHandle);
3590            if (rc != NO_ERROR) {
3591                ALOGE("%s: start_channel failed %d", __func__, rc);
3592                pthread_mutex_unlock(&mMutex);
3593                goto error_exit;
3594            }
3595        }
3596
3597
3598        goto no_error;
3599error_exit:
3600        m_perfLock.lock_rel();
3601        return rc;
3602no_error:
3603        m_perfLock.lock_rel();
3604
3605        mWokenUpByDaemon = false;
3606        mPendingLiveRequest = 0;
3607        mFirstConfiguration = false;
3608        enablePowerHint();
3609    }
3610
3611    uint32_t frameNumber = request->frame_number;
3612    cam_stream_ID_t streamID;
3613
3614    if (meta.exists(ANDROID_REQUEST_ID)) {
3615        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
3616        mCurrentRequestId = request_id;
3617        CDBG("%s: Received request with id: %d",__func__, request_id);
3618    } else if (mFirstRequest || mCurrentRequestId == -1){
3619        ALOGE("%s: Unable to find request id field, \
3620                & no previous id available", __func__);
3621        pthread_mutex_unlock(&mMutex);
3622        return NAME_NOT_FOUND;
3623    } else {
3624        CDBG("%s: Re-using old request id", __func__);
3625        request_id = mCurrentRequestId;
3626    }
3627
3628    CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
3629                                    __func__, __LINE__,
3630                                    request->num_output_buffers,
3631                                    request->input_buffer,
3632                                    frameNumber);
3633    // Acquire all request buffers first
3634    streamID.num_streams = 0;
3635    int blob_request = 0;
3636    uint32_t snapshotStreamId = 0;
3637    for (size_t i = 0; i < request->num_output_buffers; i++) {
3638        const camera3_stream_buffer_t& output = request->output_buffers[i];
3639        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3640
3641        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3642            //Call function to store local copy of jpeg data for encode params.
3643            blob_request = 1;
3644            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
3645        }
3646
3647        if (output.acquire_fence != -1) {
3648           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
3649           close(output.acquire_fence);
3650           if (rc != OK) {
3651              ALOGE("%s: sync wait failed %d", __func__, rc);
3652              pthread_mutex_unlock(&mMutex);
3653              return rc;
3654           }
3655        }
3656
3657        streamID.streamID[streamID.num_streams] =
3658            channel->getStreamID(channel->getStreamTypeMask());
3659        streamID.num_streams++;
3660
3661        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
3662            isVidBufRequested = true;
3663        }
3664    }
3665
3666    if (blob_request && mRawDumpChannel) {
3667        CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
3668        streamID.streamID[streamID.num_streams] =
3669            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
3670        streamID.num_streams++;
3671    }
3672
3673    if(request->input_buffer == NULL) {
3674        /* Parse the settings:
3675         * - For every request in NORMAL MODE
3676         * - For every request in HFR mode during preview only case
3677         * - For first request of every batch in HFR mode during video
3678         * recording. In batchmode the same settings except frame number is
3679         * repeated in each request of the batch.
3680         */
3681        if (!mBatchSize ||
3682           (mBatchSize && !isVidBufRequested) ||
3683           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
3684            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
3685            if (rc < 0) {
3686                ALOGE("%s: fail to set frame parameters", __func__);
3687                pthread_mutex_unlock(&mMutex);
3688                return rc;
3689            }
3690        }
3691        /* For batchMode HFR, setFrameParameters is not called for every
3692         * request. But only frame number of the latest request is parsed.
3693         * Keep track of first and last frame numbers in a batch so that
3694         * metadata for the frame numbers of batch can be duplicated in
3695         * handleBatchMetadta */
3696        if (mBatchSize) {
3697            if (!mToBeQueuedVidBufs) {
3698                //start of the batch
3699                mFirstFrameNumberInBatch = request->frame_number;
3700            }
3701            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3702                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
3703                ALOGE("%s: Failed to set the frame number in the parameters", __func__);
3704                return BAD_VALUE;
3705            }
3706        }
3707        if (mNeedSensorRestart) {
3708            /* Unlock the mutex as restartSensor waits on the channels to be
3709             * stopped, which in turn calls stream callback functions -
3710             * handleBufferWithLock and handleMetadataWithLock */
3711            pthread_mutex_unlock(&mMutex);
3712            rc = dynamicUpdateMetaStreamInfo();
3713            if (rc != NO_ERROR) {
3714                ALOGE("%s: Restarting the sensor failed", __func__);
3715                return BAD_VALUE;
3716            }
3717            mNeedSensorRestart = false;
3718            pthread_mutex_lock(&mMutex);
3719        }
3720    } else {
3721
3722        if (request->input_buffer->acquire_fence != -1) {
3723           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
3724           close(request->input_buffer->acquire_fence);
3725           if (rc != OK) {
3726              ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
3727              pthread_mutex_unlock(&mMutex);
3728              return rc;
3729           }
3730        }
3731    }
3732
3733    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
3734        mLastCustIntentFrmNum = frameNumber;
3735    }
3736    /* Update pending request list and pending buffers map */
3737    PendingRequestInfo pendingRequest;
3738    pendingRequestIterator latestRequest;
3739    pendingRequest.frame_number = frameNumber;
3740    pendingRequest.num_buffers = request->num_output_buffers;
3741    pendingRequest.request_id = request_id;
3742    pendingRequest.blob_request = blob_request;
3743    pendingRequest.timestamp = 0;
3744    pendingRequest.bUrgentReceived = 0;
3745    if (request->input_buffer) {
3746        pendingRequest.input_buffer =
3747                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
3748        *(pendingRequest.input_buffer) = *(request->input_buffer);
3749        pInputBuffer = pendingRequest.input_buffer;
3750    } else {
3751       pendingRequest.input_buffer = NULL;
3752       pInputBuffer = NULL;
3753    }
3754
3755    pendingRequest.pipeline_depth = 0;
3756    pendingRequest.partial_result_cnt = 0;
3757    extractJpegMetadata(mCurJpegMeta, request);
3758    pendingRequest.jpegMetadata = mCurJpegMeta;
3759    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
3760    pendingRequest.shutter_notified = false;
3761    pendingRequest.need_dynamic_blklvl = false;
3762    pendingRequest.pending_extra_result = false;
3763
3764    //extract capture intent
3765    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3766        mCaptureIntent =
3767                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3768    }
3769    if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
3770        mHybridAeEnable =
3771                meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
3772    }
3773    pendingRequest.capture_intent = mCaptureIntent;
3774    pendingRequest.hybrid_ae_enable = mHybridAeEnable;
3775
3776    for (size_t i = 0; i < request->num_output_buffers; i++) {
3777        RequestedBufferInfo requestedBuf;
3778        memset(&requestedBuf, 0, sizeof(requestedBuf));
3779        requestedBuf.stream = request->output_buffers[i].stream;
3780        requestedBuf.buffer = NULL;
3781        pendingRequest.buffers.push_back(requestedBuf);
3782
3783        // Add to buffer handle the pending buffers list
3784        PendingBufferInfo bufferInfo;
3785        bufferInfo.frame_number = frameNumber;
3786        bufferInfo.buffer = request->output_buffers[i].buffer;
3787        bufferInfo.stream = request->output_buffers[i].stream;
3788        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
3789        mPendingBuffersMap.num_buffers++;
3790        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
3791        CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
3792                __func__, frameNumber, bufferInfo.buffer,
3793                channel->getStreamTypeMask(), bufferInfo.stream->format);
3794
3795        if (bufferInfo.stream->format == HAL_PIXEL_FORMAT_RAW16) {
3796            if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
3797                CDBG("%s, frame_number:%d, need dynamic blacklevel", __func__, frameNumber);
3798                pendingRequest.need_dynamic_blklvl = true;
3799            }
3800        }
3801    }
3802    mPendingBuffersMap.last_frame_number = frameNumber;
3803    latestRequest = mPendingRequestsList.insert(
3804            mPendingRequestsList.end(), pendingRequest);
3805    if(mFlush) {
3806        pthread_mutex_unlock(&mMutex);
3807        return NO_ERROR;
3808    }
3809
3810    // Notify metadata channel we receive a request
3811    mMetadataChannel->request(NULL, frameNumber);
3812
3813    if(request->input_buffer != NULL){
3814        CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
3815        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
3816        if (NO_ERROR != rc) {
3817            ALOGE("%s: fail to set reproc parameters", __func__);
3818            pthread_mutex_unlock(&mMutex);
3819            return rc;
3820        }
3821    }
3822
3823    // Call request on other streams
3824    uint32_t streams_need_metadata = 0;
3825    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
3826    for (size_t i = 0; i < request->num_output_buffers; i++) {
3827        const camera3_stream_buffer_t& output = request->output_buffers[i];
3828        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
3829
3830        if (channel == NULL) {
3831            ALOGE("%s: invalid channel pointer for stream", __func__);
3832            continue;
3833        }
3834
3835        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
3836            if(request->input_buffer != NULL){
3837                rc = channel->request(output.buffer, frameNumber,
3838                        pInputBuffer, &mReprocMeta);
3839                if (rc < 0) {
3840                    ALOGE("%s: Fail to request on picture channel", __func__);
3841                    pthread_mutex_unlock(&mMutex);
3842                    return rc;
3843                }
3844            } else {
3845                CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
3846                        __LINE__, output.buffer, frameNumber);
3847                if (!request->settings) {
3848                    rc = channel->request(output.buffer, frameNumber,
3849                            NULL, mPrevParameters);
3850                } else {
3851                    rc = channel->request(output.buffer, frameNumber,
3852                            NULL, mParameters);
3853                }
3854                if (rc < 0) {
3855                    ALOGE("%s: Fail to request on picture channel", __func__);
3856                    pthread_mutex_unlock(&mMutex);
3857                    return rc;
3858                }
3859                pendingBufferIter->need_metadata = true;
3860                streams_need_metadata++;
3861            }
3862        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
3863            bool needMetadata = false;
3864            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
3865            rc = yuvChannel->request(output.buffer, frameNumber,
3866                    pInputBuffer,
3867                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
3868            if (rc < 0) {
3869                ALOGE("%s: Fail to request on YUV channel", __func__);
3870                pthread_mutex_unlock(&mMutex);
3871                return rc;
3872            }
3873            pendingBufferIter->need_metadata = needMetadata;
3874            if (needMetadata)
3875                streams_need_metadata += 1;
3876            CDBG("%s: calling YUV channel request, need_metadata is %d",
3877                    __func__, needMetadata);
3878        } else {
3879            CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
3880                __LINE__, output.buffer, frameNumber);
3881            rc = channel->request(output.buffer, frameNumber);
3882            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3883                    && mBatchSize) {
3884                mToBeQueuedVidBufs++;
3885                if (mToBeQueuedVidBufs == mBatchSize) {
3886                    channel->queueBatchBuf();
3887                }
3888            }
3889            if (rc < 0) {
3890                ALOGE("%s: request failed", __func__);
3891                pthread_mutex_unlock(&mMutex);
3892                return rc;
3893            }
3894        }
3895        pendingBufferIter++;
3896    }
3897
3898    //If 2 streams have need_metadata set to true, fail the request, unless
3899    //we copy/reference count the metadata buffer
3900    if (streams_need_metadata > 1) {
3901        ALOGE("%s: not supporting request in which two streams requires"
3902                " 2 HAL metadata for reprocessing", __func__);
3903        pthread_mutex_unlock(&mMutex);
3904        return -EINVAL;
3905    }
3906
3907    if(request->input_buffer == NULL) {
3908        /* Set the parameters to backend:
3909         * - For every request in NORMAL MODE
3910         * - For every request in HFR mode during preview only case
3911         * - Once every batch in HFR mode during video recording
3912         */
3913        if (!mBatchSize ||
3914           (mBatchSize && !isVidBufRequested) ||
3915           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
3916            CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
3917                    __func__, mBatchSize, isVidBufRequested,
3918                    mToBeQueuedVidBufs);
3919            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3920                    mParameters);
3921            if (rc < 0) {
3922                ALOGE("%s: set_parms failed", __func__);
3923            }
3924            /* reset to zero coz, the batch is queued */
3925            mToBeQueuedVidBufs = 0;
3926            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
3927        }
3928        mPendingLiveRequest++;
3929    }
3930
3931    CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
3932
3933    mFirstRequest = false;
3934    // Added a timed condition wait
3935    struct timespec ts;
3936    uint8_t isValidTimeout = 1;
3937    rc = clock_gettime(CLOCK_MONOTONIC, &ts);
3938    if (rc < 0) {
3939      isValidTimeout = 0;
3940      ALOGE("%s: Error reading the real time clock!!", __func__);
3941    }
3942    else {
3943      // Make timeout as 5 sec for request to be honored
3944      ts.tv_sec += 5;
3945    }
3946    //Block on conditional variable
3947    if (mBatchSize) {
3948        /* For HFR, more buffers are dequeued upfront to improve the performance */
3949        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3950        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3951    }
3952    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
3953        if (!isValidTimeout) {
3954            CDBG("%s: Blocking on conditional wait", __func__);
3955            pthread_cond_wait(&mRequestCond, &mMutex);
3956        }
3957        else {
3958            CDBG("%s: Blocking on timed conditional wait", __func__);
3959            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
3960            if (rc == ETIMEDOUT) {
3961                rc = -ENODEV;
3962                ALOGE("%s: Unblocked on timeout!!!!", __func__);
3963                break;
3964            }
3965        }
3966        CDBG("%s: Unblocked", __func__);
3967        if (mWokenUpByDaemon) {
3968            mWokenUpByDaemon = false;
3969            if (mPendingLiveRequest < maxInFlightRequests)
3970                break;
3971        }
3972    }
3973    pthread_mutex_unlock(&mMutex);
3974
3975    return rc;
3976}
3977
3978/*===========================================================================
3979 * FUNCTION   : dump
3980 *
3981 * DESCRIPTION:
3982 *
3983 * PARAMETERS :
3984 *
3985 *
3986 * RETURN     :
3987 *==========================================================================*/
void QCamera3HardwareInterface::dump(int fd)
{
    // Hold mMutex so the pending request/buffer lists cannot change while
    // they are being printed.
    pthread_mutex_lock(&mMutex);
    dprintf(fd, "\n Camera HAL3 information Begin \n");

    // Table 1: all in-flight capture requests.
    dprintf(fd, "\nNumber of pending requests: %zu \n",
        mPendingRequestsList.size());
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    for(pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end(); i++) {
        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
        i->input_buffer);
    }
    // Table 2: framework buffers not yet returned, keyed by frame number.
    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
                mPendingBuffersMap.num_buffers);
    dprintf(fd, "-------+------------------\n");
    dprintf(fd, " Frame | Stream type mask \n");
    dprintf(fd, "-------+------------------\n");
    for(List<PendingBufferInfo>::iterator i =
        mPendingBuffersMap.mPendingBufferList.begin();
        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
        // stream->priv stores the owning channel; the mask identifies which
        // stream types that channel serves.
        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
        dprintf(fd, " %5d | %11d \n",
                i->frame_number, channel->getStreamTypeMask());
    }
    dprintf(fd, "-------+------------------\n");

    // Table 3: frames flagged by the backend as dropped, per stream.
    dprintf(fd, "\nPending frame drop list: %zu\n",
        mPendingFrameDropList.size());
    dprintf(fd, "-------+-----------\n");
    dprintf(fd, " Frame | Stream ID \n");
    dprintf(fd, "-------+-----------\n");
    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
        i != mPendingFrameDropList.end(); i++) {
        dprintf(fd, " %5d | %9d \n",
            i->frame_number, i->stream_ID);
    }
    dprintf(fd, "-------+-----------\n");

    dprintf(fd, "\n Camera HAL3 information End \n");

    /* use dumpsys media.camera as trigger to send update debug level event */
    mUpdateDebugLevel = true;
    pthread_mutex_unlock(&mMutex);
    return;
}
4037
4038/*===========================================================================
4039 * FUNCTION   : flush
4040 *
4041 * DESCRIPTION:
4042 *
4043 * PARAMETERS :
4044 *
4045 *
4046 * RETURN     :
4047 *==========================================================================*/
4048int QCamera3HardwareInterface::flush()
4049{
4050    ATRACE_CALL();
4051    int32_t rc = NO_ERROR;
4052
4053    CDBG("%s: Unblocking Process Capture Request", __func__);
4054    pthread_mutex_lock(&mMutex);
4055
4056    if (mFirstRequest) {
4057        pthread_mutex_unlock(&mMutex);
4058        return NO_ERROR;
4059    }
4060
4061    mFlush = true;
4062    pthread_mutex_unlock(&mMutex);
4063
4064    rc = stopAllChannels();
4065    if (rc < 0) {
4066        ALOGE("%s: stopAllChannels failed", __func__);
4067        return rc;
4068    }
4069    if (mChannelHandle) {
4070        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4071                mChannelHandle);
4072    }
4073
4074    // Reset bundle info
4075    rc = setBundleInfo();
4076    if (rc < 0) {
4077        ALOGE("%s: setBundleInfo failed %d", __func__, rc);
4078        return rc;
4079    }
4080
4081    // Mutex Lock
4082    pthread_mutex_lock(&mMutex);
4083
4084    // Unblock process_capture_request
4085    mPendingLiveRequest = 0;
4086    pthread_cond_signal(&mRequestCond);
4087
4088    rc = notifyErrorForPendingRequests();
4089    if (rc < 0) {
4090        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
4091        pthread_mutex_unlock(&mMutex);
4092        return rc;
4093    }
4094
4095    mFlush = false;
4096
4097    // Start the Streams/Channels
4098    rc = startAllChannels();
4099    if (rc < 0) {
4100        ALOGE("%s: startAllChannels failed", __func__);
4101        pthread_mutex_unlock(&mMutex);
4102        return rc;
4103    }
4104
4105    if (mChannelHandle) {
4106        mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4107                    mChannelHandle);
4108        if (rc < 0) {
4109            ALOGE("%s: start_channel failed", __func__);
4110            pthread_mutex_unlock(&mMutex);
4111            return rc;
4112        }
4113    }
4114
4115    pthread_mutex_unlock(&mMutex);
4116
4117    return 0;
4118}
4119
4120/*===========================================================================
4121 * FUNCTION   : captureResultCb
4122 *
4123 * DESCRIPTION: Callback handler for all capture result
4124 *              (streams, as well as metadata)
4125 *
4126 * PARAMETERS :
4127 *   @metadata : metadata information
4128 *   @buffer   : actual gralloc buffer to be returned to frameworks.
4129 *               NULL if metadata.
4130 *
4131 * RETURN     : NONE
4132 *==========================================================================*/
4133void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4134                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4135{
4136    if (metadata_buf) {
4137        if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
4138            handleBatchMetadata(metadata_buf,
4139                    true /* free_and_bufdone_meta_buf */);
4140        } else { /* mBatchSize = 0 */
4141            hdrPlusPerfLock(metadata_buf);
4142            pthread_mutex_lock(&mMutex);
4143            handleMetadataWithLock(metadata_buf,
4144                    true /* free_and_bufdone_meta_buf */,
4145                    true /* last urgent frame of batch metadata */,
4146                    true /* last frame of batch metadata */ );
4147            pthread_mutex_unlock(&mMutex);
4148        }
4149    } else if (isInputBuffer) {
4150        pthread_mutex_lock(&mMutex);
4151        handleInputBufferWithLock(buffer, frame_number);
4152        pthread_mutex_unlock(&mMutex);
4153    } else {
4154        pthread_mutex_lock(&mMutex);
4155        handleBufferWithLock(buffer, frame_number);
4156        pthread_mutex_unlock(&mMutex);
4157    }
4158    return;
4159}
4160
4161/*===========================================================================
4162 * FUNCTION   : getReprocessibleOutputStreamId
4163 *
4164 * DESCRIPTION: Get source output stream id for the input reprocess stream
4165 *              based on size and format, which would be the largest
4166 *              output stream if an input stream exists.
4167 *
4168 * PARAMETERS :
4169 *   @id      : return the stream id if found
4170 *
4171 * RETURN     : int32_t type of status
4172 *              NO_ERROR  -- success
4173 *              none-zero failure code
4174 *==========================================================================*/
4175int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4176{
4177    stream_info_t* stream = NULL;
4178
4179    /* check if any output or bidirectional stream with the same size and format
4180       and return that stream */
4181    if ((mInputStreamInfo.dim.width > 0) &&
4182            (mInputStreamInfo.dim.height > 0)) {
4183        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4184                it != mStreamInfo.end(); it++) {
4185
4186            camera3_stream_t *stream = (*it)->stream;
4187            if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4188                    (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4189                    (stream->format == mInputStreamInfo.format)) {
4190                // Usage flag for an input stream and the source output stream
4191                // may be different.
4192                CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
4193                CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
4194                        __func__, stream->usage, mInputStreamInfo.usage);
4195
4196                QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4197                if (channel != NULL && channel->mStreams[0]) {
4198                    id = channel->mStreams[0]->getMyServerID();
4199                    return NO_ERROR;
4200                }
4201            }
4202        }
4203    } else {
4204        CDBG("%s: No input stream, so no reprocessible output stream", __func__);
4205    }
4206    return NAME_NOT_FOUND;
4207}
4208
4209/*===========================================================================
4210 * FUNCTION   : lookupFwkName
4211 *
4212 * DESCRIPTION: In case the enum is not same in fwk and backend
4213 *              make sure the parameter is correctly propogated
4214 *
4215 * PARAMETERS  :
4216 *   @arr      : map between the two enums
4217 *   @len      : len of the map
4218 *   @hal_name : name of the hal_parm to map
4219 *
4220 * RETURN     : int type of status
4221 *              fwk_name  -- success
4222 *              none-zero failure code
4223 *==========================================================================*/
4224template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4225        size_t len, halType hal_name)
4226{
4227
4228    for (size_t i = 0; i < len; i++) {
4229        if (arr[i].hal_name == hal_name) {
4230            return arr[i].fwk_name;
4231        }
4232    }
4233
4234    /* Not able to find matching framework type is not necessarily
4235     * an error case. This happens when mm-camera supports more attributes
4236     * than the frameworks do */
4237    CDBG_HIGH("%s: Cannot find matching framework type", __func__);
4238    return NAME_NOT_FOUND;
4239}
4240
4241/*===========================================================================
4242 * FUNCTION   : lookupHalName
4243 *
4244 * DESCRIPTION: In case the enum is not same in fwk and backend
4245 *              make sure the parameter is correctly propogated
4246 *
4247 * PARAMETERS  :
4248 *   @arr      : map between the two enums
4249 *   @len      : len of the map
4250 *   @fwk_name : name of the hal_parm to map
4251 *
4252 * RETURN     : int32_t type of status
4253 *              hal_name  -- success
4254 *              none-zero failure code
4255 *==========================================================================*/
4256template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4257        size_t len, fwkType fwk_name)
4258{
4259    for (size_t i = 0; i < len; i++) {
4260        if (arr[i].fwk_name == fwk_name) {
4261            return arr[i].hal_name;
4262        }
4263    }
4264
4265    ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
4266    return NAME_NOT_FOUND;
4267}
4268
4269/*===========================================================================
4270 * FUNCTION   : lookupProp
4271 *
4272 * DESCRIPTION: lookup a value by its name
4273 *
4274 * PARAMETERS :
4275 *   @arr     : map between the two enums
4276 *   @len     : size of the map
4277 *   @name    : name to be looked up
4278 *
4279 * RETURN     : Value if found
4280 *              CAM_CDS_MODE_MAX if not found
4281 *==========================================================================*/
4282template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4283        size_t len, const char *name)
4284{
4285    if (name) {
4286        for (size_t i = 0; i < len; i++) {
4287            if (!strcmp(arr[i].desc, name)) {
4288                return arr[i].val;
4289            }
4290        }
4291    }
4292    return CAM_CDS_MODE_MAX;
4293}
4294
4295/*===========================================================================
4296 *
4297 * DESCRIPTION:
4298 *
4299 * PARAMETERS :
4300 *   @metadata : metadata information from callback
4301 *   @timestamp: metadata buffer timestamp
4302 *   @request_id: request id
4303 *   @hybrid_ae_enable: whether hybrid ae is enabled
4304 *   @jpegMetadata: additional jpeg metadata
4305 *   @pprocDone: whether internal offline postprocsesing is done
4306 *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
4307 *                         in a batch. Always true for non-batch mode.
4308 *
4309 * RETURN     : camera_metadata_t*
4310 *              metadata in a format specified by fwk
4311 *==========================================================================*/
4312camera_metadata_t*
4313QCamera3HardwareInterface::translateFromHalMetadata(
4314                                 metadata_buffer_t *metadata,
4315                                 nsecs_t timestamp,
4316                                 int32_t request_id,
4317                                 const CameraMetadata& jpegMetadata,
4318                                 uint8_t pipeline_depth,
4319                                 uint8_t capture_intent,
4320                                 uint8_t hybrid_ae_enable,
4321                                 bool pprocDone,
4322                                 bool dynamic_blklvl,
4323                                 bool lastMetadataInBatch)
4324{
4325    CameraMetadata camMetadata;
4326    camera_metadata_t *resultMetadata;
4327
4328    if (!lastMetadataInBatch) {
4329        /* In batch mode, use empty metadata if this is not the last in batch*/
4330        resultMetadata = allocate_camera_metadata(0, 0);
4331        return resultMetadata;
4332    }
4333
4334    if (jpegMetadata.entryCount())
4335        camMetadata.append(jpegMetadata);
4336
4337    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4338    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4339    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4340    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4341    camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
4342
4343    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4344        int64_t fwk_frame_number = *frame_number;
4345        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4346    }
4347
4348    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4349        int32_t fps_range[2];
4350        fps_range[0] = (int32_t)float_range->min_fps;
4351        fps_range[1] = (int32_t)float_range->max_fps;
4352        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4353                                      fps_range, 2);
4354        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4355            __func__, fps_range[0], fps_range[1]);
4356    }
4357
4358    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4359        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4360    }
4361
4362    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4363        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4364                METADATA_MAP_SIZE(SCENE_MODES_MAP),
4365                *sceneMode);
4366        if (NAME_NOT_FOUND != val) {
4367            uint8_t fwkSceneMode = (uint8_t)val;
4368            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4369            CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4370                    __func__, fwkSceneMode);
4371        }
4372    }
4373
4374    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4375        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4376        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4377    }
4378
4379    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4380        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4381        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4382    }
4383
4384    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4385        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4386        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4387    }
4388
4389    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4390            CAM_INTF_META_EDGE_MODE, metadata) {
4391        uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
4392        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4393    }
4394
4395    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4396        uint8_t fwk_flashPower = (uint8_t) *flashPower;
4397        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4398    }
4399
4400    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4401        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4402    }
4403
4404    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4405        if (0 <= *flashState) {
4406            uint8_t fwk_flashState = (uint8_t) *flashState;
4407            if (!gCamCapability[mCameraId]->flash_available) {
4408                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4409            }
4410            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4411        }
4412    }
4413
4414    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4415        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4416        if (NAME_NOT_FOUND != val) {
4417            uint8_t fwk_flashMode = (uint8_t)val;
4418            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4419        }
4420    }
4421
4422    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4423        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4424        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4425    }
4426
4427    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4428        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4429    }
4430
4431    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4432        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4433    }
4434
4435    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4436        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4437    }
4438
4439    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4440        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4441        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4442    }
4443
4444    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4445        uint8_t fwk_videoStab = (uint8_t) *videoStab;
4446        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4447    }
4448
4449    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4450        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4451        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4452    }
4453
4454    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4455        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4456    }
4457
4458    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4459        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4460
4461        CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
4462          blackLevelSourcePattern->cam_black_level[0],
4463          blackLevelSourcePattern->cam_black_level[1],
4464          blackLevelSourcePattern->cam_black_level[2],
4465          blackLevelSourcePattern->cam_black_level[3]);
4466    }
4467
4468    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4469        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4470        float fwk_blackLevelInd[4];
4471
4472        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4473        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4474        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4475        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4476
4477        CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
4478          blackLevelAppliedPattern->cam_black_level[0],
4479          blackLevelAppliedPattern->cam_black_level[1],
4480          blackLevelAppliedPattern->cam_black_level[2],
4481          blackLevelAppliedPattern->cam_black_level[3]);
4482        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
4483        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4484
4485        // if dynmaic_blklvl is true, we calculate blklvl from raw callback
4486        // otherwise, use the value from linearization LUT.
4487        if (dynamic_blklvl == false) {
4488            // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4489            // depth space.
4490            fwk_blackLevelInd[0] /= 64.0;
4491            fwk_blackLevelInd[1] /= 64.0;
4492            fwk_blackLevelInd[2] /= 64.0;
4493            fwk_blackLevelInd[3] /= 64.0;
4494            camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
4495        }
4496    }
4497
4498    // Fixed whitelevel is used by ISP/Sensor
4499    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
4500            &gCamCapability[mCameraId]->white_level, 1);
4501
4502    if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
4503        gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
4504        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
4505        for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
4506            opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
4507        }
4508        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
4509                opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
4510    }
4511
4512    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
4513            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
4514        int32_t scalerCropRegion[4];
4515        scalerCropRegion[0] = hScalerCropRegion->left;
4516        scalerCropRegion[1] = hScalerCropRegion->top;
4517        scalerCropRegion[2] = hScalerCropRegion->width;
4518        scalerCropRegion[3] = hScalerCropRegion->height;
4519
4520        // Adjust crop region from sensor output coordinate system to active
4521        // array coordinate system.
4522        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
4523                scalerCropRegion[2], scalerCropRegion[3]);
4524
4525        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
4526    }
4527
4528    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
4529        CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
4530        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
4531    }
4532
4533    IF_META_AVAILABLE(int64_t, sensorFameDuration,
4534            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
4535        CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
4536        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
4537    }
4538
4539    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
4540            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
4541        CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
4542        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
4543                sensorRollingShutterSkew, 1);
4544    }
4545
4546    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
4547        CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
4548        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
4549
4550        //calculate the noise profile based on sensitivity
4551        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
4552        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
4553        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
4554        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
4555            noise_profile[i]   = noise_profile_S;
4556            noise_profile[i+1] = noise_profile_O;
4557        }
4558        CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
4559                noise_profile_S, noise_profile_O);
4560        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
4561                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
4562    }
4563
4564    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
4565        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
4566        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
4567    }
4568
4569    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
4570        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
4571                *faceDetectMode);
4572        if (NAME_NOT_FOUND != val) {
4573            uint8_t fwk_faceDetectMode = (uint8_t)val;
4574            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
4575
4576            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4577                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
4578                        CAM_INTF_META_FACE_DETECTION, metadata) {
4579                    uint8_t numFaces = MIN(
4580                            faceDetectionInfo->num_faces_detected, MAX_ROI);
4581                    int32_t faceIds[MAX_ROI];
4582                    uint8_t faceScores[MAX_ROI];
4583                    int32_t faceRectangles[MAX_ROI * 4];
4584                    int32_t faceLandmarks[MAX_ROI * 6];
4585                    size_t j = 0, k = 0;
4586
4587                    for (size_t i = 0; i < numFaces; i++) {
4588                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
4589                        // Adjust crop region from sensor output coordinate system to active
4590                        // array coordinate system.
4591                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
4592                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
4593                                rect.width, rect.height);
4594
4595                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
4596                                faceRectangles+j, -1);
4597
4598                        // Map the co-ordinate sensor output coordinate system to active
4599                        // array coordinate system.
4600                        cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
4601                        mCropRegionMapper.toActiveArray(face.left_eye_center.x,
4602                                face.left_eye_center.y);
4603                        mCropRegionMapper.toActiveArray(face.right_eye_center.x,
4604                                face.right_eye_center.y);
4605                        mCropRegionMapper.toActiveArray(face.mouth_center.x,
4606                                face.mouth_center.y);
4607
4608                        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
4609                        j+= 4;
4610                        k+= 6;
4611                    }
4612                    if (numFaces <= 0) {
4613                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
4614                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
4615                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
4616                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
4617                    }
4618
4619                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
4620                            numFaces);
4621                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
4622                            faceRectangles, numFaces * 4U);
4623                    if (fwk_faceDetectMode ==
4624                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
4625                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
4626                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
4627                                faceLandmarks, numFaces * 6U);
4628                   }
4629                }
4630            }
4631        }
4632    }
4633
4634    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
4635        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
4636        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
4637    }
4638
4639    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
4640            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
4641        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
4642        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
4643    }
4644
4645    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
4646            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
4647        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
4648                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
4649    }
4650
4651    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
4652            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
4653        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
4654                CAM_MAX_SHADING_MAP_HEIGHT);
4655        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
4656                CAM_MAX_SHADING_MAP_WIDTH);
4657        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
4658                lensShadingMap->lens_shading, 4U * map_width * map_height);
4659    }
4660
4661    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
4662        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
4663        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
4664    }
4665
4666    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
4667        //Populate CAM_INTF_META_TONEMAP_CURVES
4668        /* ch0 = G, ch 1 = B, ch 2 = R*/
4669        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4670            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4671                    __func__, tonemap->tonemap_points_cnt,
4672                    CAM_MAX_TONEMAP_CURVE_SIZE);
4673            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4674        }
4675
4676        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
4677                        &tonemap->curves[0].tonemap_points[0][0],
4678                        tonemap->tonemap_points_cnt * 2);
4679
4680        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
4681                        &tonemap->curves[1].tonemap_points[0][0],
4682                        tonemap->tonemap_points_cnt * 2);
4683
4684        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
4685                        &tonemap->curves[2].tonemap_points[0][0],
4686                        tonemap->tonemap_points_cnt * 2);
4687    }
4688
4689    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
4690            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
4691        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
4692                CC_GAINS_COUNT);
4693    }
4694
4695    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
4696            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
4697        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
4698                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
4699                CC_MATRIX_COLS * CC_MATRIX_ROWS);
4700    }
4701
4702    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
4703            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
4704        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
4705            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
4706                    __func__, toneCurve->tonemap_points_cnt,
4707                    CAM_MAX_TONEMAP_CURVE_SIZE);
4708            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
4709        }
4710        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
4711                (float*)toneCurve->curve.tonemap_points,
4712                toneCurve->tonemap_points_cnt * 2);
4713    }
4714
4715    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
4716            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
4717        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
4718                predColorCorrectionGains->gains, 4);
4719    }
4720
4721    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
4722            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
4723        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
4724                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
4725                CC_MATRIX_ROWS * CC_MATRIX_COLS);
4726    }
4727
4728    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
4729        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
4730    }
4731
4732    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
4733        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
4734        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
4735    }
4736
4737    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
4738        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
4739        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
4740    }
4741
4742    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
4743        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
4744                *effectMode);
4745        if (NAME_NOT_FOUND != val) {
4746            uint8_t fwk_effectMode = (uint8_t)val;
4747            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
4748        }
4749    }
4750
4751    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
4752            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
4753        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
4754                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
4755        if (NAME_NOT_FOUND != fwk_testPatternMode) {
4756            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
4757        }
4758        int32_t fwk_testPatternData[4];
4759        fwk_testPatternData[0] = testPatternData->r;
4760        fwk_testPatternData[3] = testPatternData->b;
4761        switch (gCamCapability[mCameraId]->color_arrangement) {
4762        case CAM_FILTER_ARRANGEMENT_RGGB:
4763        case CAM_FILTER_ARRANGEMENT_GRBG:
4764            fwk_testPatternData[1] = testPatternData->gr;
4765            fwk_testPatternData[2] = testPatternData->gb;
4766            break;
4767        case CAM_FILTER_ARRANGEMENT_GBRG:
4768        case CAM_FILTER_ARRANGEMENT_BGGR:
4769            fwk_testPatternData[2] = testPatternData->gr;
4770            fwk_testPatternData[1] = testPatternData->gb;
4771            break;
4772        default:
4773            ALOGE("%s: color arrangement %d is not supported", __func__,
4774                gCamCapability[mCameraId]->color_arrangement);
4775            break;
4776        }
4777        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
4778    }
4779
4780    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4781        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
4782    }
4783
4784    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4785        String8 str((const char *)gps_methods);
4786        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
4787    }
4788
4789    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4790        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
4791    }
4792
4793    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4794        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
4795    }
4796
4797    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
4798        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
4799        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
4800    }
4801
4802    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4803        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
4804        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
4805    }
4806
4807    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4808        int32_t fwk_thumb_size[2];
4809        fwk_thumb_size[0] = thumb_size->width;
4810        fwk_thumb_size[1] = thumb_size->height;
4811        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
4812    }
4813
4814    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
4815        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
4816                privateData,
4817                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
4818    }
4819
4820    if (metadata->is_tuning_params_valid) {
4821        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
4822        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
4823        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
4824
4825
4826        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
4827                sizeof(uint32_t));
4828        data += sizeof(uint32_t);
4829
4830        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
4831                sizeof(uint32_t));
4832        CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
4833        data += sizeof(uint32_t);
4834
4835        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
4836                sizeof(uint32_t));
4837        CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
4838        data += sizeof(uint32_t);
4839
4840        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
4841                sizeof(uint32_t));
4842        CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
4843        data += sizeof(uint32_t);
4844
4845        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
4846                sizeof(uint32_t));
4847        CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
4848        data += sizeof(uint32_t);
4849
4850        metadata->tuning_params.tuning_mod3_data_size = 0;
4851        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
4852                sizeof(uint32_t));
4853        CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
4854        data += sizeof(uint32_t);
4855
4856        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
4857                TUNING_SENSOR_DATA_MAX);
4858        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
4859                count);
4860        data += count;
4861
4862        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
4863                TUNING_VFE_DATA_MAX);
4864        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
4865                count);
4866        data += count;
4867
4868        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
4869                TUNING_CPP_DATA_MAX);
4870        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
4871                count);
4872        data += count;
4873
4874        count = MIN(metadata->tuning_params.tuning_cac_data_size,
4875                TUNING_CAC_DATA_MAX);
4876        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
4877                count);
4878        data += count;
4879
4880        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
4881                (int32_t *)(void *)tuning_meta_data_blob,
4882                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
4883    }
4884
4885    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
4886            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
4887        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
4888                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
4889                NEUTRAL_COL_POINTS);
4890    }
4891
4892    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
4893        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
4894        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
4895    }
4896
4897    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
4898        int32_t aeRegions[REGIONS_TUPLE_COUNT];
4899        // Adjust crop region from sensor output coordinate system to active
4900        // array coordinate system.
4901        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
4902                hAeRegions->rect.width, hAeRegions->rect.height);
4903
4904        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
4905        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
4906                REGIONS_TUPLE_COUNT);
4907        CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4908                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
4909                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
4910                hAeRegions->rect.height);
4911    }
4912
4913    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
4914        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
4915        if (NAME_NOT_FOUND != val) {
4916            uint8_t fwkAfMode = (uint8_t)val;
4917            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
4918            CDBG("%s: Metadata : ANDROID_CONTROL_AF_MODE %d", __func__, val);
4919        } else {
4920            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_AF_MODE %d",
4921                    __func__, val);
4922        }
4923    }
4924
4925    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
4926        uint8_t fwk_afState = (uint8_t) *afState;
4927        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
4928        CDBG("%s: Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
4929    }
4930
4931    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
4932        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
4933    }
4934
4935    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
4936        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
4937    }
4938
4939    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
4940        uint8_t fwk_lensState = *lensState;
4941        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
4942    }
4943
4944    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
4945        /*af regions*/
4946        int32_t afRegions[REGIONS_TUPLE_COUNT];
4947        // Adjust crop region from sensor output coordinate system to active
4948        // array coordinate system.
4949        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
4950                hAfRegions->rect.width, hAfRegions->rect.height);
4951
4952        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
4953        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
4954                REGIONS_TUPLE_COUNT);
4955        CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
4956                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
4957                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
4958                hAfRegions->rect.height);
4959    }
4960
4961    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
4962        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
4963                *hal_ab_mode);
4964        if (NAME_NOT_FOUND != val) {
4965            uint8_t fwk_ab_mode = (uint8_t)val;
4966            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
4967        }
4968    }
4969
4970    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4971        int val = lookupFwkName(SCENE_MODES_MAP,
4972                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
4973        if (NAME_NOT_FOUND != val) {
4974            uint8_t fwkBestshotMode = (uint8_t)val;
4975            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
4976            CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
4977        } else {
4978            CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
4979        }
4980    }
4981
4982    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
4983         uint8_t fwk_mode = (uint8_t) *mode;
4984         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
4985    }
4986
4987    /* Constant metadata values to be update*/
4988    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
4989    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
4990
4991    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
4992    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
4993
4994    int32_t hotPixelMap[2];
4995    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
4996
4997    // CDS
4998    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
4999        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5000    }
5001
5002    // TNR
5003    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5004        uint8_t tnr_enable       = tnr->denoise_enable;
5005        int32_t tnr_process_type = (int32_t)tnr->process_plates;
5006
5007        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5008        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5009    }
5010
5011    // Reprocess crop data
5012    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5013        uint8_t cnt = crop_data->num_of_streams;
5014        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5015            // mm-qcamera-daemon only posts crop_data for streams
5016            // not linked to pproc. So no valid crop metadata is not
5017            // necessarily an error case.
5018            CDBG("%s: No valid crop metadata entries", __func__);
5019        } else {
5020            uint32_t reproc_stream_id;
5021            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5022                CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
5023            } else {
5024                int rc = NO_ERROR;
5025                Vector<int32_t> roi_map;
5026                int32_t *crop = new int32_t[cnt*4];
5027                if (NULL == crop) {
5028                   rc = NO_MEMORY;
5029                }
5030                if (NO_ERROR == rc) {
5031                    int32_t streams_found = 0;
5032                    for (size_t i = 0; i < cnt; i++) {
5033                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5034                            if (pprocDone) {
5035                                // HAL already does internal reprocessing,
5036                                // either via reprocessing before JPEG encoding,
5037                                // or offline postprocessing for pproc bypass case.
5038                                crop[0] = 0;
5039                                crop[1] = 0;
5040                                crop[2] = mInputStreamInfo.dim.width;
5041                                crop[3] = mInputStreamInfo.dim.height;
5042                            } else {
5043                                crop[0] = crop_data->crop_info[i].crop.left;
5044                                crop[1] = crop_data->crop_info[i].crop.top;
5045                                crop[2] = crop_data->crop_info[i].crop.width;
5046                                crop[3] = crop_data->crop_info[i].crop.height;
5047                            }
5048                            roi_map.add(crop_data->crop_info[i].roi_map.left);
5049                            roi_map.add(crop_data->crop_info[i].roi_map.top);
5050                            roi_map.add(crop_data->crop_info[i].roi_map.width);
5051                            roi_map.add(crop_data->crop_info[i].roi_map.height);
5052                            streams_found++;
5053                            CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
5054                                    __func__,
5055                                    crop[0], crop[1], crop[2], crop[3]);
5056                            CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
5057                                    __func__,
5058                                    crop_data->crop_info[i].roi_map.left,
5059                                    crop_data->crop_info[i].roi_map.top,
5060                                    crop_data->crop_info[i].roi_map.width,
5061                                    crop_data->crop_info[i].roi_map.height);
5062                            break;
5063
5064                       }
5065                    }
5066                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5067                            &streams_found, 1);
5068                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
5069                            crop, (size_t)(streams_found * 4));
5070                    if (roi_map.array()) {
5071                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5072                                roi_map.array(), roi_map.size());
5073                    }
5074               }
5075               if (crop) {
5076                   delete [] crop;
5077               }
5078            }
5079        }
5080    }
5081
5082    IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5083        int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5084                *cacMode);
5085        if (NAME_NOT_FOUND != val) {
5086            uint8_t fwkCacMode = (uint8_t)val;
5087            camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5088        } else {
5089            ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
5090        }
5091    }
5092
5093    // Post blob of cam_cds_data through vendor tag.
5094    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5095        uint8_t cnt = cdsInfo->num_of_streams;
5096        cam_cds_data_t cdsDataOverride;
5097        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5098        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5099        cdsDataOverride.num_of_streams = 1;
5100        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5101            uint32_t reproc_stream_id;
5102            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5103                CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
5104            } else {
5105                for (size_t i = 0; i < cnt; i++) {
5106                    if (cdsInfo->cds_info[i].stream_id ==
5107                            reproc_stream_id) {
5108                        cdsDataOverride.cds_info[0].cds_enable =
5109                                cdsInfo->cds_info[i].cds_enable;
5110                        break;
5111                    }
5112                }
5113            }
5114        } else {
5115            CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
5116        }
5117        camMetadata.update(QCAMERA3_CDS_INFO,
5118                (uint8_t *)&cdsDataOverride,
5119                sizeof(cam_cds_data_t));
5120    }
5121
5122    // Ldaf calibration data
5123    if (!mLdafCalibExist) {
5124        IF_META_AVAILABLE(uint32_t, ldafCalib,
5125                CAM_INTF_META_LDAF_EXIF, metadata) {
5126            mLdafCalibExist = true;
5127            mLdafCalib[0] = ldafCalib[0];
5128            mLdafCalib[1] = ldafCalib[1];
5129            CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
5130                    ldafCalib[0], ldafCalib[1]);
5131        }
5132    }
5133
5134    // Post Raw Sensitivity Boost = ISP digital gain
5135    IF_META_AVAILABLE(float, ispDigitalGain, CAM_INTF_META_ISP_DIGITAL_GAIN, metadata) {
5136        int32_t postRawSensitivity = static_cast<int32_t>(*ispDigitalGain * 100);
5137        camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &postRawSensitivity, 1);
5138    }
5139
5140    resultMetadata = camMetadata.release();
5141    return resultMetadata;
5142}
5143
5144/*===========================================================================
5145 * FUNCTION   : saveExifParams
5146 *
5147 * DESCRIPTION:
5148 *
5149 * PARAMETERS :
5150 *   @metadata : metadata information from callback
5151 *
5152 * RETURN     : none
5153 *
5154 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Copies each EXIF debug section (AE/AWB/AF/ASD/stats) out of the metadata
    // buffer into mExifParams, raising the matching *_valid flag for every
    // section that was present. The cached values are later retrieved via
    // get3AExifParams().

    // AE debug data
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        mExifParams.ae_debug_params = *ae_exif_debug_params;
        mExifParams.ae_debug_params_valid = TRUE;
    }
    // AWB debug data
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        mExifParams.awb_debug_params = *awb_exif_debug_params;
        mExifParams.awb_debug_params_valid = TRUE;
    }
    // AF debug data
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        mExifParams.af_debug_params = *af_exif_debug_params;
        mExifParams.af_debug_params_valid = TRUE;
    }
    // ASD debug data
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        mExifParams.asd_debug_params = *asd_exif_debug_params;
        mExifParams.asd_debug_params_valid = TRUE;
    }
    // Stats buffer debug data
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        mExifParams.stats_debug_params = *stats_exif_debug_params;
        mExifParams.stats_debug_params_valid = TRUE;
    }
}
5183
5184/*===========================================================================
5185 * FUNCTION   : get3AExifParams
5186 *
5187 * DESCRIPTION:
5188 *
5189 * PARAMETERS : none
5190 *
5191 *
5192 * RETURN     : mm_jpeg_exif_params_t
5193 *
5194 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns (by value) the EXIF debug parameters most recently cached by
    // saveExifParams().
    return mExifParams;
}
5199
5200/*===========================================================================
5201 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
5202 *
5203 * DESCRIPTION:
5204 *
5205 * PARAMETERS :
5206 *   @metadata : metadata information from callback
5207 *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
5208 *                               urgent metadata in a batch. Always true for
5209 *                               non-batch mode.
5210 *
5211 * RETURN     : camera_metadata_t*
5212 *              metadata in a format specified by fwk
5213 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
{
    // Builds the "partial result" framework metadata (3A state/mode tags) from
    // a HAL metadata buffer. Ownership of the returned camera_metadata_t
    // passes to the caller (CameraMetadata::release()).
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    // AE precapture trigger and its id are forwarded as-is
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    // AE state
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    // AF trigger and its id
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    // AWB mode: HAL enum mapped to framework enum via lookup table
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three HAL fields (redeye, LED/flash
    // mode, AE mode). Precedence below: redeye reduction wins over flash
    // auto/on, which wins over plain AE on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;      // MAX sentinels mean "not reported"
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields gave enough information; AE_MODE is simply
        // not published for this result.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
5314
5315/*===========================================================================
5316 * FUNCTION   : dumpMetadataToFile
5317 *
5318 * DESCRIPTION: Dumps tuning metadata to file system
5319 *
5320 * PARAMETERS :
5321 *   @meta           : tuning metadata
5322 *   @dumpFrameCount : current dump frame count
5323 *   @enabled        : Enable mask
5324 *
5325 *==========================================================================*/
5326void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
5327                                                   uint32_t &dumpFrameCount,
5328                                                   bool enabled,
5329                                                   const char *type,
5330                                                   uint32_t frameNumber)
5331{
5332    uint32_t frm_num = 0;
5333
5334    //Some sanity checks
5335    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
5336        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
5337              __func__,
5338              meta.tuning_sensor_data_size,
5339              TUNING_SENSOR_DATA_MAX);
5340        return;
5341    }
5342
5343    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
5344        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
5345              __func__,
5346              meta.tuning_vfe_data_size,
5347              TUNING_VFE_DATA_MAX);
5348        return;
5349    }
5350
5351    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
5352        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
5353              __func__,
5354              meta.tuning_cpp_data_size,
5355              TUNING_CPP_DATA_MAX);
5356        return;
5357    }
5358
5359    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
5360        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
5361              __func__,
5362              meta.tuning_cac_data_size,
5363              TUNING_CAC_DATA_MAX);
5364        return;
5365    }
5366    //
5367
5368    if(enabled){
5369        char timeBuf[FILENAME_MAX];
5370        char buf[FILENAME_MAX];
5371        memset(buf, 0, sizeof(buf));
5372        memset(timeBuf, 0, sizeof(timeBuf));
5373        time_t current_time;
5374        struct tm * timeinfo;
5375        time (&current_time);
5376        timeinfo = localtime (&current_time);
5377        if (timeinfo != NULL) {
5378            strftime (timeBuf, sizeof(timeBuf),
5379                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
5380        }
5381        String8 filePath(timeBuf);
5382        snprintf(buf,
5383                sizeof(buf),
5384                "%dm_%s_%d.bin",
5385                dumpFrameCount,
5386                type,
5387                frameNumber);
5388        filePath.append(buf);
5389        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
5390        if (file_fd >= 0) {
5391            ssize_t written_len = 0;
5392            meta.tuning_data_version = TUNING_DATA_VERSION;
5393            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
5394            written_len += write(file_fd, data, sizeof(uint32_t));
5395            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
5396            CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
5397            written_len += write(file_fd, data, sizeof(uint32_t));
5398            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
5399            CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
5400            written_len += write(file_fd, data, sizeof(uint32_t));
5401            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
5402            CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
5403            written_len += write(file_fd, data, sizeof(uint32_t));
5404            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
5405            CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
5406            written_len += write(file_fd, data, sizeof(uint32_t));
5407            meta.tuning_mod3_data_size = 0;
5408            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
5409            CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
5410            written_len += write(file_fd, data, sizeof(uint32_t));
5411            size_t total_size = meta.tuning_sensor_data_size;
5412            data = (void *)((uint8_t *)&meta.data);
5413            written_len += write(file_fd, data, total_size);
5414            total_size = meta.tuning_vfe_data_size;
5415            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
5416            written_len += write(file_fd, data, total_size);
5417            total_size = meta.tuning_cpp_data_size;
5418            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
5419            written_len += write(file_fd, data, total_size);
5420            total_size = meta.tuning_cac_data_size;
5421            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
5422            written_len += write(file_fd, data, total_size);
5423            close(file_fd);
5424        }else {
5425            ALOGE("%s: fail to open file for metadata dumping", __func__);
5426        }
5427    }
5428}
5429
5430/*===========================================================================
5431 * FUNCTION   : cleanAndSortStreamInfo
5432 *
5433 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
5434 *              and sort them such that raw stream is at the end of the list
5435 *              This is a workaround for camera daemon constraint.
5436 *
5437 * PARAMETERS : None
5438 *
5439 *==========================================================================*/
5440void QCamera3HardwareInterface::cleanAndSortStreamInfo()
5441{
5442    List<stream_info_t *> newStreamInfo;
5443
5444    /*clean up invalid streams*/
5445    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
5446            it != mStreamInfo.end();) {
5447        if(((*it)->status) == INVALID){
5448            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
5449            delete channel;
5450            free(*it);
5451            it = mStreamInfo.erase(it);
5452        } else {
5453            it++;
5454        }
5455    }
5456
5457    // Move preview/video/callback/snapshot streams into newList
5458    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5459            it != mStreamInfo.end();) {
5460        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
5461                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
5462                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
5463            newStreamInfo.push_back(*it);
5464            it = mStreamInfo.erase(it);
5465        } else
5466            it++;
5467    }
5468    // Move raw streams into newList
5469    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5470            it != mStreamInfo.end();) {
5471        newStreamInfo.push_back(*it);
5472        it = mStreamInfo.erase(it);
5473    }
5474
5475    mStreamInfo = newStreamInfo;
5476}
5477
5478/*===========================================================================
5479 * FUNCTION   : extractJpegMetadata
5480 *
5481 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
5482 *              JPEG metadata is cached in HAL, and return as part of capture
5483 *              result when metadata is returned from camera daemon.
5484 *
5485 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
5486 *              @request:      capture request
5487 *
5488 *==========================================================================*/
5489void QCamera3HardwareInterface::extractJpegMetadata(
5490        CameraMetadata& jpegMetadata,
5491        const camera3_capture_request_t *request)
5492{
5493    CameraMetadata frame_settings;
5494    frame_settings = request->settings;
5495
5496    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
5497        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
5498                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
5499                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
5500
5501    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
5502        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
5503                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
5504                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
5505
5506    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
5507        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
5508                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
5509                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
5510
5511    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
5512        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
5513                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
5514                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
5515
5516    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
5517        jpegMetadata.update(ANDROID_JPEG_QUALITY,
5518                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
5519                frame_settings.find(ANDROID_JPEG_QUALITY).count);
5520
5521    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
5522        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
5523                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
5524                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
5525
5526    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
5527        int32_t thumbnail_size[2];
5528        thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
5529        thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
5530        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
5531            int32_t orientation =
5532                  frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
5533            if ((orientation == 90) || (orientation == 270)) {
5534               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
5535               int32_t temp;
5536               temp = thumbnail_size[0];
5537               thumbnail_size[0] = thumbnail_size[1];
5538               thumbnail_size[1] = temp;
5539            }
5540         }
5541         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
5542                thumbnail_size,
5543                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
5544    }
5545
5546}
5547
5548/*===========================================================================
5549 * FUNCTION   : convertToRegions
5550 *
5551 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
5552 *
5553 * PARAMETERS :
5554 *   @rect   : cam_rect_t struct to convert
5555 *   @region : int32_t destination array
5556 *   @weight : if we are converting from cam_area_t, weight is valid
5557 *             else weight = -1
5558 *
5559 *==========================================================================*/
5560void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
5561        int32_t *region, int weight)
5562{
5563    region[0] = rect.left;
5564    region[1] = rect.top;
5565    region[2] = rect.left + rect.width;
5566    region[3] = rect.top + rect.height;
5567    if (weight > -1) {
5568        region[4] = weight;
5569    }
5570}
5571
5572/*===========================================================================
5573 * FUNCTION   : convertFromRegions
5574 *
5575 * DESCRIPTION: helper method to convert from array to cam_rect_t
5576 *
5577 * PARAMETERS :
5578 *   @rect   : cam_rect_t struct to convert
5579 *   @region : int32_t destination array
5580 *   @weight : if we are converting from cam_area_t, weight is valid
5581 *             else weight = -1
5582 *
5583 *==========================================================================*/
5584void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
5585        const camera_metadata_t *settings, uint32_t tag)
5586{
5587    CameraMetadata frame_settings;
5588    frame_settings = settings;
5589    int32_t x_min = frame_settings.find(tag).data.i32[0];
5590    int32_t y_min = frame_settings.find(tag).data.i32[1];
5591    int32_t x_max = frame_settings.find(tag).data.i32[2];
5592    int32_t y_max = frame_settings.find(tag).data.i32[3];
5593    roi.weight = frame_settings.find(tag).data.i32[4];
5594    roi.rect.left = x_min;
5595    roi.rect.top = y_min;
5596    roi.rect.width = x_max - x_min;
5597    roi.rect.height = y_max - y_min;
5598}
5599
5600/*===========================================================================
5601 * FUNCTION   : resetIfNeededROI
5602 *
5603 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
5604 *              crop region
5605 *
5606 * PARAMETERS :
5607 *   @roi       : cam_area_t struct to resize
5608 *   @scalerCropRegion : cam_crop_region_t region to compare against
5609 *
5610 *
5611 *==========================================================================*/
5612bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
5613                                                 const cam_crop_region_t* scalerCropRegion)
5614{
5615    int32_t roi_x_max = roi->rect.width + roi->rect.left;
5616    int32_t roi_y_max = roi->rect.height + roi->rect.top;
5617    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
5618    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
5619
5620    /* According to spec weight = 0 is used to indicate roi needs to be disabled
5621     * without having this check the calculations below to validate if the roi
5622     * is inside scalar crop region will fail resulting in the roi not being
5623     * reset causing algorithm to continue to use stale roi window
5624     */
5625    if (roi->weight == 0) {
5626        return true;
5627    }
5628
5629    if ((roi_x_max < scalerCropRegion->left) ||
5630        // right edge of roi window is left of scalar crop's left edge
5631        (roi_y_max < scalerCropRegion->top)  ||
5632        // bottom edge of roi window is above scalar crop's top edge
5633        (roi->rect.left > crop_x_max) ||
5634        // left edge of roi window is beyond(right) of scalar crop's right edge
5635        (roi->rect.top > crop_y_max)){
5636        // top edge of roi windo is above scalar crop's top edge
5637        return false;
5638    }
5639    if (roi->rect.left < scalerCropRegion->left) {
5640        roi->rect.left = scalerCropRegion->left;
5641    }
5642    if (roi->rect.top < scalerCropRegion->top) {
5643        roi->rect.top = scalerCropRegion->top;
5644    }
5645    if (roi_x_max > crop_x_max) {
5646        roi_x_max = crop_x_max;
5647    }
5648    if (roi_y_max > crop_y_max) {
5649        roi_y_max = crop_y_max;
5650    }
5651    roi->rect.width = roi_x_max - roi->rect.left;
5652    roi->rect.height = roi_y_max - roi->rect.top;
5653    return true;
5654}
5655
5656/*===========================================================================
5657 * FUNCTION   : convertLandmarks
5658 *
5659 * DESCRIPTION: helper method to extract the landmarks from face detection info
5660 *
5661 * PARAMETERS :
 *   @face      : cam_face_detection_info_t with detected eye/mouth centers
 *   @landmarks : int32_t destination array (6 entries)
5664 *
5665 *
5666 *==========================================================================*/
5667void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
5668{
5669    landmarks[0] = (int32_t)face.left_eye_center.x;
5670    landmarks[1] = (int32_t)face.left_eye_center.y;
5671    landmarks[2] = (int32_t)face.right_eye_center.x;
5672    landmarks[3] = (int32_t)face.right_eye_center.y;
5673    landmarks[4] = (int32_t)face.mouth_center.x;
5674    landmarks[5] = (int32_t)face.mouth_center.y;
5675}
5676
5677#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
5678/*===========================================================================
5679 * FUNCTION   : initCapabilities
5680 *
5681 * DESCRIPTION: initialize camera capabilities in static data struct
5682 *
5683 * PARAMETERS :
5684 *   @cameraId  : camera Id
5685 *
5686 * RETURN     : int32_t type of status
5687 *              NO_ERROR  -- success
5688 *              none-zero failure code
5689 *==========================================================================*/
5690int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
5691{
5692    int rc = 0;
5693    mm_camera_vtbl_t *cameraHandle = NULL;
5694    QCamera3HeapMemory *capabilityHeap = NULL;
5695
5696    rc = camera_open((uint8_t)cameraId, &cameraHandle);
5697    if (rc || !cameraHandle) {
5698        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
5699        goto open_failed;
5700    }
5701
5702    capabilityHeap = new QCamera3HeapMemory(1);
5703    if (capabilityHeap == NULL) {
5704        ALOGE("%s: creation of capabilityHeap failed", __func__);
5705        goto heap_creation_failed;
5706    }
5707    /* Allocate memory for capability buffer */
5708    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
5709    if(rc != OK) {
5710        ALOGE("%s: No memory for cappability", __func__);
5711        goto allocate_failed;
5712    }
5713
5714    /* Map memory for capability buffer */
5715    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
5716    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
5717                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
5718                                capabilityHeap->getFd(0),
5719                                sizeof(cam_capability_t));
5720    if(rc < 0) {
5721        ALOGE("%s: failed to map capability buffer", __func__);
5722        goto map_failed;
5723    }
5724
5725    /* Query Capability */
5726    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
5727    if(rc < 0) {
5728        ALOGE("%s: failed to query capability",__func__);
5729        goto query_failed;
5730    }
5731    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
5732    if (!gCamCapability[cameraId]) {
5733        ALOGE("%s: out of memory", __func__);
5734        goto query_failed;
5735    }
5736    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
5737                                        sizeof(cam_capability_t));
5738    rc = 0;
5739
5740query_failed:
5741    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
5742                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
5743map_failed:
5744    capabilityHeap->deallocate();
5745allocate_failed:
5746    delete capabilityHeap;
5747heap_creation_failed:
5748    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
5749    cameraHandle = NULL;
5750open_failed:
5751    return rc;
5752}
5753
5754/*==========================================================================
 * FUNCTION   : get3AVersion
5756 *
5757 * DESCRIPTION: get the Q3A S/W version
5758 *
5759 * PARAMETERS :
5760 *  @sw_version: Reference of Q3A structure which will hold version info upon
5761 *               return
5762 *
5763 * RETURN     : None
5764 *
5765 *==========================================================================*/
5766void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
5767{
5768    if(gCamCapability[mCameraId])
5769        sw_version = gCamCapability[mCameraId]->q3a_version;
5770    else
5771        ALOGE("%s:Capability structure NULL!", __func__);
5772}
5773
5774
5775/*===========================================================================
5776 * FUNCTION   : initParameters
5777 *
5778 * DESCRIPTION: initialize camera parameters
5779 *
5780 * PARAMETERS :
5781 *
5782 * RETURN     : int32_t type of status
5783 *              NO_ERROR  -- success
5784 *              none-zero failure code
5785 *==========================================================================*/
5786int QCamera3HardwareInterface::initParameters()
5787{
5788    int rc = 0;
5789
5790    //Allocate Set Param Buffer
5791    mParamHeap = new QCamera3HeapMemory(1);
5792    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
5793    if(rc != OK) {
5794        rc = NO_MEMORY;
5795        ALOGE("Failed to allocate SETPARM Heap memory");
5796        delete mParamHeap;
5797        mParamHeap = NULL;
5798        return rc;
5799    }
5800
5801    //Map memory for parameters buffer
5802    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
5803            CAM_MAPPING_BUF_TYPE_PARM_BUF,
5804            mParamHeap->getFd(0),
5805            sizeof(metadata_buffer_t));
5806    if(rc < 0) {
5807        ALOGE("%s:failed to map SETPARM buffer",__func__);
5808        rc = FAILED_TRANSACTION;
5809        mParamHeap->deallocate();
5810        delete mParamHeap;
5811        mParamHeap = NULL;
5812        return rc;
5813    }
5814
5815    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
5816
5817    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
5818    return rc;
5819}
5820
5821/*===========================================================================
5822 * FUNCTION   : deinitParameters
5823 *
5824 * DESCRIPTION: de-initialize camera parameters
5825 *
5826 * PARAMETERS :
5827 *
5828 * RETURN     : NONE
5829 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down everything initParameters() set up, in reverse order.

    // Remove the kernel-side mapping of the parameter buffer first.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Release the heap that backed the parameter buffer.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; it is dangling now.
    mParameters = NULL;

    // mPrevParameters was malloc'd separately in initParameters().
    free(mPrevParameters);
    mPrevParameters = NULL;
}
5844
5845/*===========================================================================
5846 * FUNCTION   : calcMaxJpegSize
5847 *
5848 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
5849 *
5850 * PARAMETERS :
5851 *
5852 * RETURN     : max_jpeg_size
5853 *==========================================================================*/
5854size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
5855{
5856    size_t max_jpeg_size = 0;
5857    size_t temp_width, temp_height;
5858    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
5859            MAX_SIZES_CNT);
5860    for (size_t i = 0; i < count; i++) {
5861        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
5862        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
5863        if (temp_width * temp_height > max_jpeg_size ) {
5864            max_jpeg_size = temp_width * temp_height;
5865        }
5866    }
5867    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
5868    return max_jpeg_size;
5869}
5870
5871/*===========================================================================
5872 * FUNCTION   : getMaxRawSize
5873 *
5874 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
5875 *
5876 * PARAMETERS :
5877 *
5878 * RETURN     : Largest supported Raw Dimension
5879 *==========================================================================*/
5880cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
5881{
5882    int max_width = 0;
5883    cam_dimension_t maxRawSize;
5884
5885    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
5886    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
5887        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
5888            max_width = gCamCapability[camera_id]->raw_dim[i].width;
5889            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
5890        }
5891    }
5892    return maxRawSize;
5893}
5894
5895
5896/*===========================================================================
5897 * FUNCTION   : calcMaxJpegDim
5898 *
5899 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
5900 *
5901 * PARAMETERS :
5902 *
5903 * RETURN     : max_jpeg_dim
5904 *==========================================================================*/
5905cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
5906{
5907    cam_dimension_t max_jpeg_dim;
5908    cam_dimension_t curr_jpeg_dim;
5909    max_jpeg_dim.width = 0;
5910    max_jpeg_dim.height = 0;
5911    curr_jpeg_dim.width = 0;
5912    curr_jpeg_dim.height = 0;
5913    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
5914        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
5915        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
5916        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
5917            max_jpeg_dim.width * max_jpeg_dim.height ) {
5918            max_jpeg_dim.width = curr_jpeg_dim.width;
5919            max_jpeg_dim.height = curr_jpeg_dim.height;
5920        }
5921    }
5922    return max_jpeg_dim;
5923}
5924
5925/*===========================================================================
5926 * FUNCTION   : addStreamConfig
5927 *
5928 * DESCRIPTION: adds the stream configuration to the array
5929 *
5930 * PARAMETERS :
5931 * @available_stream_configs : pointer to stream configuration array
5932 * @scalar_format            : scalar format
5933 * @dim                      : configuration dimension
5934 * @config_type              : input or output configuration type
5935 *
5936 * RETURN     : NONE
5937 *==========================================================================*/
5938void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
5939        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
5940{
5941    available_stream_configs.add(scalar_format);
5942    available_stream_configs.add(dim.width);
5943    available_stream_configs.add(dim.height);
5944    available_stream_configs.add(config_type);
5945}
5946
5947
5948/*===========================================================================
5949 * FUNCTION   : initStaticMetadata
5950 *
5951 * DESCRIPTION: initialize the static metadata
5952 *
5953 * PARAMETERS :
5954 *   @cameraId  : camera Id
5955 *
5956 * RETURN     : int32_t type of status
5957 *              0  -- success
5958 *              non-zero failure code
5959 *==========================================================================*/
5960int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
5961{
5962    int rc = 0;
5963    CameraMetadata staticInfo;
5964    size_t count = 0;
5965    bool limitedDevice = false;
5966    char prop[PROPERTY_VALUE_MAX];
5967
5968    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
5969     * guaranteed, its advertised as limited device */
5970    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
5971            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
5972
5973    uint8_t supportedHwLvl = limitedDevice ?
5974            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
5975            // No capability check done here to distinguish LEVEL_FULL from
5976            // LEVEL_3 - assuming this HAL will not run on devices that only
5977            // meet FULL spec
5978            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
5979
5980    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
5981            &supportedHwLvl, 1);
5982
5983    bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
5984    /*HAL 3 only*/
5985    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
5986                    &gCamCapability[cameraId]->min_focus_distance, 1);
5987
5988    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
5989                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
5990
5991    /*should be using focal lengths but sensor doesn't provide that info now*/
5992    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
5993                      &gCamCapability[cameraId]->focal_length,
5994                      1);
5995
5996    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
5997                      gCamCapability[cameraId]->apertures,
5998                      gCamCapability[cameraId]->apertures_count);
5999
6000    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6001                gCamCapability[cameraId]->filter_densities,
6002                gCamCapability[cameraId]->filter_densities_count);
6003
6004
6005    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6006                      (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6007                      gCamCapability[cameraId]->optical_stab_modes_count);
6008
6009    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
6010            gCamCapability[cameraId]->lens_shading_map_size.height};
6011    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6012                      lens_shading_map_size,
6013                      sizeof(lens_shading_map_size)/sizeof(int32_t));
6014
6015    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6016            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6017
6018    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6019            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6020
6021    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6022            &gCamCapability[cameraId]->max_frame_duration, 1);
6023
6024    camera_metadata_rational baseGainFactor = {
6025            gCamCapability[cameraId]->base_gain_factor.numerator,
6026            gCamCapability[cameraId]->base_gain_factor.denominator};
6027    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6028                      &baseGainFactor, 1);
6029
6030    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6031                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6032
6033    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6034            gCamCapability[cameraId]->pixel_array_size.height};
6035    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6036                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6037
6038    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6039                                                gCamCapability[cameraId]->active_array_size.top,
6040                                                gCamCapability[cameraId]->active_array_size.width,
6041                                                gCamCapability[cameraId]->active_array_size.height};
6042    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6043                      active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6044
6045    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6046            &gCamCapability[cameraId]->white_level, 1);
6047
6048    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6049            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6050
6051    bool hasBlackRegions = false;
6052    if (gCamCapability[cameraId]->optical_black_region_count != 0 &&
6053            gCamCapability[cameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
6054        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6055        for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i+=4) {
6056            // Left
6057            opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6058            //Top
6059            opticalBlackRegions[i + 1] = gCamCapability[cameraId]->optical_black_regions[i + 1];
6060            // Width
6061            opticalBlackRegions[i + 2] = gCamCapability[cameraId]->optical_black_regions[i + 2] -
6062                    gCamCapability[cameraId]->optical_black_regions[i];
6063            // Height
6064            opticalBlackRegions[i + 3] = gCamCapability[cameraId]->optical_black_regions[i + 3] -
6065                    gCamCapability[cameraId]->optical_black_regions[i + 1];
6066        }
6067        staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6068                opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6069        hasBlackRegions = true;
6070    }
6071
6072    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6073                      &gCamCapability[cameraId]->flash_charge_duration, 1);
6074
6075    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6076                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6077
6078    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
6079            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
6080            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
6081    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6082            &timestampSource, 1);
6083
6084    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6085                      &gCamCapability[cameraId]->histogram_size, 1);
6086
6087    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6088            &gCamCapability[cameraId]->max_histogram_count, 1);
6089
6090    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
6091            gCamCapability[cameraId]->sharpness_map_size.height};
6092
6093    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6094            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6095
6096    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6097            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6098
6099    int32_t scalar_formats[] = {
6100            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6101            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6102            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6103            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6104            HAL_PIXEL_FORMAT_RAW10,
6105            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6106    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6107    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6108                      scalar_formats,
6109                      scalar_formats_count);
6110
6111    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6112    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6113    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6114            count, MAX_SIZES_CNT, available_processed_sizes);
6115    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6116            available_processed_sizes, count * 2);
6117
6118    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6119    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6120    makeTable(gCamCapability[cameraId]->raw_dim,
6121            count, MAX_SIZES_CNT, available_raw_sizes);
6122    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6123            available_raw_sizes, count * 2);
6124
6125    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6126    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6127    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6128            count, MAX_SIZES_CNT, available_fps_ranges);
6129    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6130            available_fps_ranges, count * 2);
6131
6132    camera_metadata_rational exposureCompensationStep = {
6133            gCamCapability[cameraId]->exp_compensation_step.numerator,
6134            gCamCapability[cameraId]->exp_compensation_step.denominator};
6135    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6136                      &exposureCompensationStep, 1);
6137
6138    Vector<uint8_t> availableVstabModes;
6139    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6140    char eis_prop[PROPERTY_VALUE_MAX];
6141    memset(eis_prop, 0, sizeof(eis_prop));
6142    property_get("persist.camera.eis.enable", eis_prop, "0");
6143    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
6144    if (facingBack && eis_prop_set) {
6145        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6146    }
6147    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6148                      availableVstabModes.array(), availableVstabModes.size());
6149
6150    /*HAL 1 and HAL 3 common*/
6151    float maxZoom = 4;
6152    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6153            &maxZoom, 1);
6154
6155    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
6156    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6157
6158    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6159    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6160        max3aRegions[2] = 0; /* AF not supported */
6161    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6162            max3aRegions, 3);
6163
6164    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6165    memset(prop, 0, sizeof(prop));
6166    property_get("persist.camera.facedetect", prop, "1");
6167    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6168    CDBG("%s: Support face detection mode: %d",
6169            __func__, supportedFaceDetectMode);
6170
6171    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
6172    Vector<uint8_t> availableFaceDetectModes;
6173    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6174    if (supportedFaceDetectMode == 1) {
6175        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6176    } else if (supportedFaceDetectMode == 2) {
6177        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6178    } else if (supportedFaceDetectMode == 3) {
6179        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6180        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6181    } else {
6182        maxFaces = 0;
6183    }
6184    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6185            availableFaceDetectModes.array(),
6186            availableFaceDetectModes.size());
6187    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6188            (int32_t *)&maxFaces, 1);
6189
6190    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
6191                                           gCamCapability[cameraId]->exposure_compensation_max};
6192    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6193            exposureCompensationRange,
6194            sizeof(exposureCompensationRange)/sizeof(int32_t));
6195
6196    uint8_t lensFacing = (facingBack) ?
6197            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6198    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6199
6200    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6201                      available_thumbnail_sizes,
6202                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6203
6204    /*all sizes will be clubbed into this tag*/
6205    int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
6206    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6207    size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
6208            count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
6209            gCamCapability[cameraId]->max_downscale_factor);
6210    /*android.scaler.availableStreamConfigurations*/
6211    size_t max_stream_configs_size = count * scalar_formats_count * 4;
6212    Vector<int32_t> available_stream_configs;
6213    cam_dimension_t active_array_dim;
6214    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6215    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6216    /* Add input/output stream configurations for each scalar formats*/
6217    for (size_t j = 0; j < scalar_formats_count; j++) {
6218        switch (scalar_formats[j]) {
6219        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6220        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6221        case HAL_PIXEL_FORMAT_RAW10:
6222            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
6223                addStreamConfig(available_stream_configs, scalar_formats[j],
6224                        gCamCapability[cameraId]->raw_dim[i],
6225                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6226            }
6227            break;
6228        case HAL_PIXEL_FORMAT_BLOB:
6229            cam_dimension_t jpeg_size;
6230            for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
6231                jpeg_size.width  = available_jpeg_sizes[i*2];
6232                jpeg_size.height = available_jpeg_sizes[i*2+1];
6233                addStreamConfig(available_stream_configs, scalar_formats[j],
6234                        jpeg_size,
6235                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6236            }
6237            break;
6238        case HAL_PIXEL_FORMAT_YCbCr_420_888:
6239        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
6240        default:
6241            cam_dimension_t largest_picture_size;
6242            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
6243            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
6244                addStreamConfig(available_stream_configs, scalar_formats[j],
6245                        gCamCapability[cameraId]->picture_sizes_tbl[i],
6246                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6247                /* Book keep largest */
6248                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
6249                        >= largest_picture_size.width &&
6250                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
6251                        >= largest_picture_size.height)
6252                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
6253            }
6254            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
6255            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
6256                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
6257                 addStreamConfig(available_stream_configs, scalar_formats[j],
6258                         largest_picture_size,
6259                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
6260            }
6261            break;
6262        }
6263    }
6264
    /* Publish the (format, width, height, direction) tuples accumulated above. */
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    /* Default hot-pixel correction mode advertised to the framework. */
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    /* Hot pixel map reporting is disabled by default. */
    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    /* android.scaler.availableMinFrameDurations:
     * flat array of (format, width, height, min_duration) tuples. RAW-family
     * formats use the raw dimension/duration tables, all other formats use
     * the picture size/duration tables.
     * NOTE(review): assumes max_stream_configs_size (computed before this
     * chunk) is large enough for scalar_formats_count * max table size * 4
     * entries -- confirm at its definition site. */
    int64_t available_min_durations[max_stream_configs_size];
    size_t idx = 0;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->raw_dim[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->raw_dim[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->raw_min_duration[i];
                idx+=4;
            }
            break;
        default:
            for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
                available_min_durations[idx] = scalar_formats[j];
                available_min_durations[idx+1] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
                available_min_durations[idx+2] =
                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
                available_min_durations[idx+3] =
                    gCamCapability[cameraId]->picture_min_duration[i];
                idx+=4;
            }
            break;
        }
    }
    /* idx is the number of int64 entries actually filled (a multiple of 4). */
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      &available_min_durations[0], idx);
6308
    /* High frame rate (HFR) video configurations: for each vendor HFR table
     * entry, emit (width, height, fps_min, fps_max, batch_size_max) tuples. */
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        /* Map the vendor HFR mode enum to a concrete frame rate. */
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            /* fps stays 0 so the entry is filtered out below. */
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
             * [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

            /* (width, height, fps_min, fps_max, batch_size_max) */
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
            available_hfr_configs.add(
                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps);
            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
       }
    }
    //Advertise HFR capability only if the property is set (defaults to "1", i.e. enabled)
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    /* Publish only when enabled AND at least one config was actually built. */
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
6381
    /* Worst-case JPEG blob size in bytes for this camera. */
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    /* Translate the HAL's supported effect list into framework enum values,
     * silently dropping any effect with no framework equivalent. */
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    /* Supported scene modes (CAM_SCENE_MODE_OFF excluded).
     * supported_indexes remembers each mode's position in the HAL table so
     * makeOverridesList can look up the matching override entry. */
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    /* Three override values per supported scene mode (filled by
     * makeOverridesList -- presumably AE/AWB/AF; verify at its definition). */
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    /* NOTE(review): this fallback runs AFTER AVAILABLE_SCENE_MODES was
     * already published with count 0, and makeOverridesList above did not
     * fill an override entry for the synthetic DISABLED mode, so the three
     * bytes published below for it are uninitialized -- confirm intended. */
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    /* Top-level 3A control modes supported by this HAL. */
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);
6446
    /* AE antibanding modes supported by the HAL, mapped to framework enums. */
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    /* Chromatic aberration correction (CAC) modes. If the HAL reports none,
     * advertise OFF so the mandatory tag is still published. */
    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
    size = 0;
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        avail_abberation_modes[0] =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        size++;
    } else {
        for (size_t i = 0; i < count; i++) {
            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                    gCamCapability[cameraId]->aberration_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_abberation_modes[size] = (uint8_t)val;
                size++;
            } else {
                /* NOTE(review): an unmapped mode aborts the whole scan via
                 * break, dropping any later valid modes -- confirm this is
                 * intended rather than a continue. */
                ALOGE("%s: Invalid CAC mode %d", __func__,
                        gCamCapability[cameraId]->aberration_modes[i]);
                break;
            }
        }

    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    /* Auto-focus modes mapped to framework enums. */
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    /* Auto-white-balance modes mapped to framework enums. */
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
6523
    /* Flash firing power levels copied straight from the capability table. */
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    /* Whether a flash unit exists on this camera. */
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    /* AE modes from the HAL; flash-assisted AE modes are appended only when
     * a flash unit exists (relies on AVAILABLE_TRUE being non-zero). */
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    /* Sensor sensitivity (ISO) range as [min, max]. */
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    /* Report the physical sensor mount angle as the sensor orientation. */
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    /* Maximum simultaneous output stream counts.
     * NOTE(review): the framework defines this tag's order as
     * (raw, processed non-stalling, stalling); the initializer below lists
     * the stalling macro first -- confirm the macro values/order agree with
     * that contract. */
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    /* No app-controllable LEDs: publish the tag with a zero-length payload. */
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);
6584
    /* Focus distance calibration quality, published only when the HAL value
     * maps to a framework enum. */
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    /* Sensor test pattern modes that map to framework enums. */
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    /* Worst-case number of frames a request can spend in the HAL pipeline. */
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    /* Number of partial metadata results delivered per capture. */
    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    /* Maximum extra stall (frames) introduced by a reprocess request. */
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);

    /* Device capability flags. Constrained high-speed video is advertised
     * only when HFR is enabled and at least one HFR config was built; RAW is
     * advertised only for non-YUV sensors. */
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
    //BURST_CAPTURE.
    /* NOTE(review): the check keys off sens_type == CAM_SENSOR_RAW rather
     * than the advertised capabilities the comment describes -- confirm the
     * two conditions actually agree for all sensors. */
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has
    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    /* A single reprocess input stream is supported. */
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);
6663
    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    /* IMPLEMENTATION_DEFINED -> {BLOB, YUV_420_888}; YUV_420_888 -> {BLOB, YUV_420_888}. */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    /* LIMITED devices report the HAL's sync latency; otherwise advertise
     * per-frame control. */
    int32_t max_latency = (limitedDevice) ?
            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    /* Fixed lists of supported post-processing modes follow. */
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));

    /* Reference illuminants are published only when the HAL value maps to a
     * framework enum. */
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    /* Color/calibration matrices from the capability struct, reinterpreted
     * as camera_metadata_rational_t arrays (presumably laid out that way in
     * cam_capability_t -- verify against its definition). */
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
6763
    /* Keys an application may set in a capture request. AF_REGIONS is
     * appended only when the device exposes more than one focus mode. */
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    /* AF regions make sense only when focus is actually controllable. */
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
6804
    /* Keys the HAL reports in capture results. Extra keys are appended
     * depending on focus controllability, sensor type (RAW-only keys) and
     * the supported face-detect mode. */
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES,
       ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
       ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    /* Noise profile / green split only apply to non-YUV (Bayer) sensors. */
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    /* NOTE(review): mode 1 re-adds keys already present in result_keys_basic,
     * while modes 2/3 add IDs/landmarks only -- confirm the intended mapping
     * of supportedFaceDetectMode values. */
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
6852
    /* Static characteristics keys published by this HAL.
     * OPTICAL_BLACK_REGIONS is appended only when the sensor reports black
     * regions (hasBlackRegions, computed before this chunk). */
    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
       ANDROID_SCALER_CROPPING_TYPE,
       ANDROID_SYNC_MAX_LATENCY,
       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS,
       ANDROID_CONTROL_AVAILABLE_MODES,
       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
       ANDROID_SHADING_AVAILABLE_MODES,
       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };

    Vector<int32_t> available_characteristics_keys;
    available_characteristics_keys.appendArray(characteristics_keys_basic,
            sizeof(characteristics_keys_basic)/sizeof(int32_t));
    if (hasBlackRegions) {
        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                      available_characteristics_keys.array(),
                      available_characteristics_keys.size());
6918
6919    /*available stall durations depend on the hw + sw and will be different for different devices */
6920    /*have to add for raw after implementation*/
6921    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
6922    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
6923
6924    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6925    size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
6926            MAX_SIZES_CNT);
6927    size_t available_stall_size = count * 4;
6928    int64_t available_stall_durations[available_stall_size];
6929    idx = 0;
6930    for (uint32_t j = 0; j < stall_formats_count; j++) {
6931       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
6932          for (uint32_t i = 0; i < count; i++) {
6933             available_stall_durations[idx]   = stall_formats[j];
6934             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
6935             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
6936             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
6937             idx+=4;
6938          }
6939       } else {
6940          for (uint32_t i = 0; i < raw_count; i++) {
6941             available_stall_durations[idx]   = stall_formats[j];
6942             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
6943             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
6944             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
6945             idx+=4;
6946          }
6947       }
6948    }
6949    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
6950                      available_stall_durations,
6951                      idx);
6952    //QCAMERA3_OPAQUE_RAW
6953    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6954    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6955    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
6956    case LEGACY_RAW:
6957        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6958            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
6959        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6960            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
6961        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6962            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
6963        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
6964        break;
6965    case MIPI_RAW:
6966        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
6967            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
6968        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
6969            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
6970        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
6971            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
6972        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
6973        break;
6974    default:
6975        ALOGE("%s: unknown opaque_raw_format %d", __func__,
6976                gCamCapability[cameraId]->opaque_raw_fmt);
6977        break;
6978    }
6979    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
6980
6981    int32_t strides[3*raw_count];
6982    for (size_t i = 0; i < raw_count; i++) {
6983        cam_stream_buf_plane_info_t buf_planes;
6984        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
6985        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
6986        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
6987            &gCamCapability[cameraId]->padding_info, &buf_planes);
6988        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
6989    }
6990    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
6991            3*raw_count);
6992
6993    gStaticMetadata[cameraId] = staticInfo.release();
6994    return rc;
6995}
6996
6997/*===========================================================================
6998 * FUNCTION   : makeTable
6999 *
7000 * DESCRIPTION: make a table of sizes
7001 *
7002 * PARAMETERS :
7003 *
7004 *
7005 *==========================================================================*/
7006void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7007        size_t max_size, int32_t *sizeTable)
7008{
7009    size_t j = 0;
7010    if (size > max_size) {
7011       size = max_size;
7012    }
7013    for (size_t i = 0; i < size; i++) {
7014        sizeTable[j] = dimTable[i].width;
7015        sizeTable[j+1] = dimTable[i].height;
7016        j+=2;
7017    }
7018}
7019
7020/*===========================================================================
7021 * FUNCTION   : makeFPSTable
7022 *
7023 * DESCRIPTION: make a table of fps ranges
7024 *
7025 * PARAMETERS :
7026 *
7027 *==========================================================================*/
7028void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7029        size_t max_size, int32_t *fpsRangesTable)
7030{
7031    size_t j = 0;
7032    if (size > max_size) {
7033       size = max_size;
7034    }
7035    for (size_t i = 0; i < size; i++) {
7036        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7037        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7038        j+=2;
7039    }
7040}
7041
7042/*===========================================================================
7043 * FUNCTION   : makeOverridesList
7044 *
7045 * DESCRIPTION: make a list of scene mode overrides
7046 *
7047 * PARAMETERS :
7048 *
7049 *
7050 *==========================================================================*/
7051void QCamera3HardwareInterface::makeOverridesList(
7052        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
7053        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
7054{
7055    /*daemon will give a list of overrides for all scene modes.
7056      However we should send the fwk only the overrides for the scene modes
7057      supported by the framework*/
7058    size_t j = 0;
7059    if (size > max_size) {
7060       size = max_size;
7061    }
7062    size_t focus_count = CAM_FOCUS_MODE_MAX;
7063    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
7064            focus_count);
7065    for (size_t i = 0; i < size; i++) {
7066        bool supt = false;
7067        size_t index = supported_indexes[i];
7068        overridesList[j] = gCamCapability[camera_id]->flash_available ?
7069                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
7070        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7071                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7072                overridesTable[index].awb_mode);
7073        if (NAME_NOT_FOUND != val) {
7074            overridesList[j+1] = (uint8_t)val;
7075        }
7076        uint8_t focus_override = overridesTable[index].af_mode;
7077        for (size_t k = 0; k < focus_count; k++) {
7078           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
7079              supt = true;
7080              break;
7081           }
7082        }
7083        if (supt) {
7084            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7085                    focus_override);
7086            if (NAME_NOT_FOUND != val) {
7087                overridesList[j+2] = (uint8_t)val;
7088            }
7089        } else {
7090           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
7091        }
7092        j+=3;
7093    }
7094}
7095
7096/*===========================================================================
7097 * FUNCTION   : filterJpegSizes
7098 *
7099 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7100 *              could be downscaled to
7101 *
7102 * PARAMETERS :
7103 *
7104 * RETURN     : length of jpegSizes array
7105 *==========================================================================*/
7106
7107size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7108        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7109        uint8_t downscale_factor)
7110{
7111    if (0 == downscale_factor) {
7112        downscale_factor = 1;
7113    }
7114
7115    int32_t min_width = active_array_size.width / downscale_factor;
7116    int32_t min_height = active_array_size.height / downscale_factor;
7117    size_t jpegSizesCnt = 0;
7118    if (processedSizesCnt > maxCount) {
7119        processedSizesCnt = maxCount;
7120    }
7121    for (size_t i = 0; i < processedSizesCnt; i+=2) {
7122        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7123            jpegSizes[jpegSizesCnt] = processedSizes[i];
7124            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7125            jpegSizesCnt += 2;
7126        }
7127    }
7128    return jpegSizesCnt;
7129}
7130
7131/*===========================================================================
7132 * FUNCTION   : getPreviewHalPixelFormat
7133 *
7134 * DESCRIPTION: convert the format to type recognized by framework
7135 *
7136 * PARAMETERS : format : the format from backend
7137 *
7138 ** RETURN    : format recognized by framework
7139 *
7140 *==========================================================================*/
7141int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
7142{
7143    int32_t halPixelFormat;
7144
7145    switch (format) {
7146    case CAM_FORMAT_YUV_420_NV12:
7147        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
7148        break;
7149    case CAM_FORMAT_YUV_420_NV21:
7150        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7151        break;
7152    case CAM_FORMAT_YUV_420_NV21_ADRENO:
7153        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
7154        break;
7155    case CAM_FORMAT_YUV_420_YV12:
7156        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
7157        break;
7158    case CAM_FORMAT_YUV_422_NV16:
7159    case CAM_FORMAT_YUV_422_NV61:
7160    default:
7161        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
7162        break;
7163    }
7164    return halPixelFormat;
7165}
7166
7167/*===========================================================================
7168 * FUNCTION   : computeNoiseModelEntryS
7169 *
7170 * DESCRIPTION: function to map a given sensitivity to the S noise
7171 *              model parameters in the DNG noise model.
7172 *
7173 * PARAMETERS : sens : the sensor sensitivity
7174 *
 * RETURN     : S (sensor amplification) noise
7176 *
7177 *==========================================================================*/
7178double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7179    double s = gCamCapability[mCameraId]->gradient_S * sens +
7180            gCamCapability[mCameraId]->offset_S;
7181    return ((s < 0.0) ? 0.0 : s);
7182}
7183
7184/*===========================================================================
7185 * FUNCTION   : computeNoiseModelEntryO
7186 *
7187 * DESCRIPTION: function to map a given sensitivity to the O noise
7188 *              model parameters in the DNG noise model.
7189 *
7190 * PARAMETERS : sens : the sensor sensitivity
7191 *
 * RETURN     : O (sensor readout) noise
7193 *
7194 *==========================================================================*/
7195double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
7196    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
7197    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
7198            1.0 : (1.0 * sens / max_analog_sens);
7199    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
7200            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
7201    return ((o < 0.0) ? 0.0 : o);
7202}
7203
7204/*===========================================================================
7205 * FUNCTION   : getSensorSensitivity
7206 *
7207 * DESCRIPTION: convert iso_mode to an integer value
7208 *
7209 * PARAMETERS : iso_mode : the iso_mode supported by sensor
7210 *
 * RETURN     : sensitivity supported by sensor
7212 *
7213 *==========================================================================*/
7214int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
7215{
7216    int32_t sensitivity;
7217
7218    switch (iso_mode) {
7219    case CAM_ISO_MODE_100:
7220        sensitivity = 100;
7221        break;
7222    case CAM_ISO_MODE_200:
7223        sensitivity = 200;
7224        break;
7225    case CAM_ISO_MODE_400:
7226        sensitivity = 400;
7227        break;
7228    case CAM_ISO_MODE_800:
7229        sensitivity = 800;
7230        break;
7231    case CAM_ISO_MODE_1600:
7232        sensitivity = 1600;
7233        break;
7234    default:
7235        sensitivity = -1;
7236        break;
7237    }
7238    return sensitivity;
7239}
7240
7241/*===========================================================================
7242 * FUNCTION   : getCamInfo
7243 *
7244 * DESCRIPTION: query camera capabilities
7245 *
7246 * PARAMETERS :
7247 *   @cameraId  : camera Id
7248 *   @info      : camera info struct to be filled in with camera capabilities
7249 *
7250 * RETURN     : int type of status
7251 *              NO_ERROR  -- success
7252 *              none-zero failure code
7253 *==========================================================================*/
7254int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
7255        struct camera_info *info)
7256{
7257    ATRACE_CALL();
7258    int rc = 0;
7259
7260    pthread_mutex_lock(&gCamLock);
7261    if (NULL == gCamCapability[cameraId]) {
7262        rc = initCapabilities(cameraId);
7263        if (rc < 0) {
7264            pthread_mutex_unlock(&gCamLock);
7265            return rc;
7266        }
7267    }
7268
7269    if (NULL == gStaticMetadata[cameraId]) {
7270        rc = initStaticMetadata(cameraId);
7271        if (rc < 0) {
7272            pthread_mutex_unlock(&gCamLock);
7273            return rc;
7274        }
7275    }
7276
7277    switch(gCamCapability[cameraId]->position) {
7278    case CAM_POSITION_BACK:
7279        info->facing = CAMERA_FACING_BACK;
7280        break;
7281
7282    case CAM_POSITION_FRONT:
7283        info->facing = CAMERA_FACING_FRONT;
7284        break;
7285
7286    default:
7287        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
7288        rc = -1;
7289        break;
7290    }
7291
7292
7293    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
7294    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
7295    info->static_camera_characteristics = gStaticMetadata[cameraId];
7296
7297    //For now assume both cameras can operate independently.
7298    info->conflicting_devices = NULL;
7299    info->conflicting_devices_length = 0;
7300
7301    //resource cost is 100 * MIN(1.0, m/M),
7302    //where m is throughput requirement with maximum stream configuration
7303    //and M is CPP maximum throughput.
7304    float max_fps = 0.0;
7305    for (uint32_t i = 0;
7306            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
7307        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
7308            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
7309    }
7310    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
7311            gCamCapability[cameraId]->active_array_size.width *
7312            gCamCapability[cameraId]->active_array_size.height * max_fps /
7313            gCamCapability[cameraId]->max_pixel_bandwidth;
7314    info->resource_cost = 100 * MIN(1.0, ratio);
7315    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
7316            info->resource_cost);
7317
7318    pthread_mutex_unlock(&gCamLock);
7319    return rc;
7320}
7321
7322/*===========================================================================
7323 * FUNCTION   : translateCapabilityToMetadata
7324 *
7325 * DESCRIPTION: translate the capability into camera_metadata_t
7326 *
7327 * PARAMETERS : type of the request
7328 *
7329 *
7330 * RETURN     : success: camera_metadata_t*
7331 *              failure: NULL
7332 *
7333 *==========================================================================*/
7334camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
7335{
7336    if (mDefaultMetadata[type] != NULL) {
7337        return mDefaultMetadata[type];
7338    }
7339    //first time we are handling this request
7340    //fill up the metadata structure using the wrapper class
7341    CameraMetadata settings;
7342    //translate from cam_capability_t to camera_metadata_tag_t
7343    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
7344    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
7345    int32_t defaultRequestID = 0;
7346    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
7347
7348    /* OIS disable */
7349    char ois_prop[PROPERTY_VALUE_MAX];
7350    memset(ois_prop, 0, sizeof(ois_prop));
7351    property_get("persist.camera.ois.disable", ois_prop, "0");
7352    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
7353
7354    /* Force video to use OIS */
7355    char videoOisProp[PROPERTY_VALUE_MAX];
7356    memset(videoOisProp, 0, sizeof(videoOisProp));
7357    property_get("persist.camera.ois.video", videoOisProp, "1");
7358    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
7359
7360    // EIS enable/disable
7361    char eis_prop[PROPERTY_VALUE_MAX];
7362    memset(eis_prop, 0, sizeof(eis_prop));
7363    property_get("persist.camera.eis.enable", eis_prop, "0");
7364    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
7365
7366    // Hybrid AE enable/disable
7367    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
7368    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
7369    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
7370    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
7371
7372    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
7373    // This is a bit hacky. EIS is enabled only when the above setprop
7374    // is set to non-zero value and on back camera (for 2015 Nexus).
7375    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
7376    // configureStream is called before this function. In other words,
7377    // we cannot guarantee the app will call configureStream before
7378    // calling createDefaultRequest.
7379    const bool eisEnabled = facingBack && eis_prop_set;
7380
7381    uint8_t controlIntent = 0;
7382    uint8_t focusMode;
7383    uint8_t vsMode;
7384    uint8_t optStabMode;
7385    uint8_t cacMode;
7386    uint8_t edge_mode;
7387    uint8_t noise_red_mode;
7388    uint8_t tonemap_mode;
7389    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7390    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7391    switch (type) {
7392      case CAMERA3_TEMPLATE_PREVIEW:
7393        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
7394        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7395        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7396        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7397        edge_mode = ANDROID_EDGE_MODE_FAST;
7398        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7399        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7400        break;
7401      case CAMERA3_TEMPLATE_STILL_CAPTURE:
7402        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
7403        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7404        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7405        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
7406        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
7407        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
7408        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
7409        break;
7410      case CAMERA3_TEMPLATE_VIDEO_RECORD:
7411        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
7412        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7413        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7414        if (eisEnabled) {
7415            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7416        }
7417        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7418        edge_mode = ANDROID_EDGE_MODE_FAST;
7419        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7420        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7421        if (forceVideoOis)
7422            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7423        break;
7424      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7425        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
7426        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
7427        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7428        if (eisEnabled) {
7429            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
7430        }
7431        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7432        edge_mode = ANDROID_EDGE_MODE_FAST;
7433        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7434        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7435        if (forceVideoOis)
7436            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7437        break;
7438      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
7439        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
7440        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
7441        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7442        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7443        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
7444        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
7445        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7446        break;
7447      case CAMERA3_TEMPLATE_MANUAL:
7448        edge_mode = ANDROID_EDGE_MODE_FAST;
7449        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7450        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7451        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7452        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
7453        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7454        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7455        break;
7456      default:
7457        edge_mode = ANDROID_EDGE_MODE_FAST;
7458        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
7459        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
7460        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
7461        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
7462        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7463        break;
7464    }
7465    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
7466    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
7467    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
7468    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
7469        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
7470    }
7471    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
7472
7473    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7474            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
7475        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
7476    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
7477            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
7478            || ois_disable)
7479        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
7480    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
7481
7482    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7483            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
7484
7485    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
7486    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
7487
7488    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
7489    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
7490
7491    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
7492    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
7493
7494    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
7495    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
7496
7497    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
7498    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
7499
7500    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
7501    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
7502
7503    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
7504    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
7505
7506    /*flash*/
7507    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
7508    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
7509
7510    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
7511    settings.update(ANDROID_FLASH_FIRING_POWER,
7512            &flashFiringLevel, 1);
7513
7514    /* lens */
7515    float default_aperture = gCamCapability[mCameraId]->apertures[0];
7516    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
7517
7518    if (gCamCapability[mCameraId]->filter_densities_count) {
7519        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
7520        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
7521                        gCamCapability[mCameraId]->filter_densities_count);
7522    }
7523
7524    float default_focal_length = gCamCapability[mCameraId]->focal_length;
7525    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
7526
7527    float default_focus_distance = 0;
7528    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
7529
7530    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
7531    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
7532
7533    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7534    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7535
7536    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
7537    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
7538
7539    /* face detection (default to OFF) */
7540    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
7541    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
7542
7543    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
7544    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
7545
7546    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
7547    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
7548
7549    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7550    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7551
7552    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7553    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
7554
7555    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7556    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
7557
7558    /* Exposure time(Update the Min Exposure Time)*/
7559    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
7560    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
7561
7562    /* frame duration */
7563    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
7564    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
7565
7566    /* sensitivity */
7567    static const int32_t default_sensitivity = 100;
7568    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
7569
7570    /*edge mode*/
7571    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
7572
7573    /*noise reduction mode*/
7574    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
7575
7576    /*color correction mode*/
7577    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
7578    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
7579
7580    /*transform matrix mode*/
7581    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
7582
7583    int32_t scaler_crop_region[4];
7584    scaler_crop_region[0] = 0;
7585    scaler_crop_region[1] = 0;
7586    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
7587    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
7588    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
7589
7590    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
7591    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
7592
7593    /*focus distance*/
7594    float focus_distance = 0.0;
7595    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
7596
7597    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
7598    float max_range = 0.0;
7599    float max_fixed_fps = 0.0;
7600    int32_t fps_range[2] = {0, 0};
7601    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
7602            i++) {
7603        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
7604            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7605        if (type == CAMERA3_TEMPLATE_PREVIEW ||
7606                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
7607                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
7608            if (range > max_range) {
7609                fps_range[0] =
7610                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7611                fps_range[1] =
7612                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7613                max_range = range;
7614            }
7615        } else {
7616            if (range < 0.01 && max_fixed_fps <
7617                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
7618                fps_range[0] =
7619                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
7620                fps_range[1] =
7621                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7622                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
7623            }
7624        }
7625    }
7626    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
7627
7628    /*precapture trigger*/
7629    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
7630    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
7631
7632    /*af trigger*/
7633    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
7634    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
7635
7636    /* ae & af regions */
7637    int32_t active_region[] = {
7638            gCamCapability[mCameraId]->active_array_size.left,
7639            gCamCapability[mCameraId]->active_array_size.top,
7640            gCamCapability[mCameraId]->active_array_size.left +
7641                    gCamCapability[mCameraId]->active_array_size.width,
7642            gCamCapability[mCameraId]->active_array_size.top +
7643                    gCamCapability[mCameraId]->active_array_size.height,
7644            0};
7645    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
7646            sizeof(active_region) / sizeof(active_region[0]));
7647    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
7648            sizeof(active_region) / sizeof(active_region[0]));
7649
7650    /* black level lock */
7651    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
7652    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
7653
7654    /* lens shading map mode */
7655    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
7656    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
7657        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
7658    }
7659    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
7660
7661    //special defaults for manual template
7662    if (type == CAMERA3_TEMPLATE_MANUAL) {
7663        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
7664        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
7665
7666        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
7667        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
7668
7669        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
7670        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
7671
7672        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
7673        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
7674
7675        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
7676        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
7677
7678        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
7679        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
7680    }
7681
7682
7683    /* TNR
7684     * We'll use this location to determine which modes TNR will be set.
7685     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
7686     * This is not to be confused with linking on a per stream basis that decision
7687     * is still on per-session basis and will be handled as part of config stream
7688     */
7689    uint8_t tnr_enable = 0;
7690
7691    if (m_bTnrPreview || m_bTnrVideo) {
7692
7693        switch (type) {
7694            case CAMERA3_TEMPLATE_VIDEO_RECORD:
7695            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
7696                    tnr_enable = 1;
7697                    break;
7698
7699            default:
7700                    tnr_enable = 0;
7701                    break;
7702        }
7703
7704        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
7705        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7706        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7707
7708        CDBG("%s: TNR:%d with process plate %d for template:%d",
7709                            __func__, tnr_enable, tnr_process_type, type);
7710    }
7711
7712    /* CDS default */
7713    char prop[PROPERTY_VALUE_MAX];
7714    memset(prop, 0, sizeof(prop));
7715    property_get("persist.camera.CDS", prop, "Auto");
7716    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
7717    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
7718    if (CAM_CDS_MODE_MAX == cds_mode) {
7719        cds_mode = CAM_CDS_MODE_AUTO;
7720    }
7721    m_CdsPreference = cds_mode;
7722
7723    /* Disabling CDS in templates which have TNR enabled*/
7724    if (tnr_enable)
7725        cds_mode = CAM_CDS_MODE_OFF;
7726
7727    int32_t mode = cds_mode;
7728    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
7729
7730    /* hybrid ae */
7731    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
7732
7733    mDefaultMetadata[type] = settings.release();
7734
7735    return mDefaultMetadata[type];
7736}
7737
7738/*===========================================================================
7739 * FUNCTION   : setFrameParameters
7740 *
7741 * DESCRIPTION: set parameters per frame as requested in the metadata from
7742 *              framework
7743 *
7744 * PARAMETERS :
7745 *   @request   : request that needs to be serviced
7746 *   @streamID : Stream ID of all the requested streams
7747 *   @blob_request: Whether this request is a blob request or not
7748 *
7749 * RETURN     : success: NO_ERROR
7750 *              failure:
7751 *==========================================================================*/
7752int QCamera3HardwareInterface::setFrameParameters(
7753                    camera3_capture_request_t *request,
7754                    cam_stream_ID_t streamID,
7755                    int blob_request,
7756                    uint32_t snapshotStreamId)
7757{
7758    /*translate from camera_metadata_t type to parm_type_t*/
7759    int rc = 0;
7760    int32_t hal_version = CAM_HAL_V3;
7761
7762    clear_metadata_buffer(mParameters);
7763    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
7764        ALOGE("%s: Failed to set hal version in the parameters", __func__);
7765        return BAD_VALUE;
7766    }
7767
7768    /*we need to update the frame number in the parameters*/
7769    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
7770            request->frame_number)) {
7771        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7772        return BAD_VALUE;
7773    }
7774
7775    /* Update stream id of all the requested buffers */
7776    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
7777        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
7778        return BAD_VALUE;
7779    }
7780
7781    if (mUpdateDebugLevel) {
7782        uint32_t dummyDebugLevel = 0;
7783        /* The value of dummyDebugLevel is irrelavent. On
7784         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
7785        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
7786                dummyDebugLevel)) {
7787            ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
7788            return BAD_VALUE;
7789        }
7790        mUpdateDebugLevel = false;
7791    }
7792
7793    if(request->settings != NULL){
7794        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
7795        if (blob_request)
7796            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
7797    }
7798
7799    return rc;
7800}
7801
7802/*===========================================================================
7803 * FUNCTION   : setReprocParameters
7804 *
7805 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
7806 *              return it.
7807 *
7808 * PARAMETERS :
7809 *   @request   : request that needs to be serviced
7810 *
7811 * RETURN     : success: NO_ERROR
7812 *              failure:
7813 *==========================================================================*/
7814int32_t QCamera3HardwareInterface::setReprocParameters(
7815        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
7816        uint32_t snapshotStreamId)
7817{
7818    /*translate from camera_metadata_t type to parm_type_t*/
7819    int rc = 0;
7820
7821    if (NULL == request->settings){
7822        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
7823        return BAD_VALUE;
7824    }
7825
7826    if (NULL == reprocParam) {
7827        ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
7828        return BAD_VALUE;
7829    }
7830    clear_metadata_buffer(reprocParam);
7831
7832    /*we need to update the frame number in the parameters*/
7833    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
7834            request->frame_number)) {
7835        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
7836        return BAD_VALUE;
7837    }
7838
7839    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
7840    if (rc < 0) {
7841        ALOGE("%s: Failed to translate reproc request", __func__);
7842        return rc;
7843    }
7844
7845    CameraMetadata frame_settings;
7846    frame_settings = request->settings;
7847    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
7848            frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
7849        int32_t *crop_count =
7850                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
7851        int32_t *crop_data =
7852                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
7853        int32_t *roi_map =
7854                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
7855        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
7856            cam_crop_data_t crop_meta;
7857            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
7858            crop_meta.num_of_streams = 1;
7859            crop_meta.crop_info[0].crop.left   = crop_data[0];
7860            crop_meta.crop_info[0].crop.top    = crop_data[1];
7861            crop_meta.crop_info[0].crop.width  = crop_data[2];
7862            crop_meta.crop_info[0].crop.height = crop_data[3];
7863
7864            crop_meta.crop_info[0].roi_map.left =
7865                    roi_map[0];
7866            crop_meta.crop_info[0].roi_map.top =
7867                    roi_map[1];
7868            crop_meta.crop_info[0].roi_map.width =
7869                    roi_map[2];
7870            crop_meta.crop_info[0].roi_map.height =
7871                    roi_map[3];
7872
7873            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
7874                rc = BAD_VALUE;
7875            }
7876            CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
7877                    __func__,
7878                    request->input_buffer->stream,
7879                    crop_meta.crop_info[0].crop.left,
7880                    crop_meta.crop_info[0].crop.top,
7881                    crop_meta.crop_info[0].crop.width,
7882                    crop_meta.crop_info[0].crop.height);
7883            CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
7884                    __func__,
7885                    request->input_buffer->stream,
7886                    crop_meta.crop_info[0].roi_map.left,
7887                    crop_meta.crop_info[0].roi_map.top,
7888                    crop_meta.crop_info[0].roi_map.width,
7889                    crop_meta.crop_info[0].roi_map.height);
7890            } else {
7891                ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
7892            }
7893    } else {
7894        ALOGE("%s: No crop data from matching output stream", __func__);
7895    }
7896
7897    /* These settings are not needed for regular requests so handle them specially for
7898       reprocess requests; information needed for EXIF tags */
7899    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
7900        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
7901                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7902        if (NAME_NOT_FOUND != val) {
7903            uint32_t flashMode = (uint32_t)val;
7904            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
7905                rc = BAD_VALUE;
7906            }
7907        } else {
7908            ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
7909                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
7910        }
7911    } else {
7912        CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
7913    }
7914
7915    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
7916        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
7917        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
7918            rc = BAD_VALUE;
7919        }
7920    } else {
7921        CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
7922    }
7923
7924    return rc;
7925}
7926
7927/*===========================================================================
7928 * FUNCTION   : saveRequestSettings
7929 *
7930 * DESCRIPTION: Add any settings that might have changed to the request settings
7931 *              and save the settings to be applied on the frame
7932 *
7933 * PARAMETERS :
7934 *   @jpegMetadata : the extracted and/or modified jpeg metadata
7935 *   @request      : request with initial settings
7936 *
7937 * RETURN     :
7938 * camera_metadata_t* : pointer to the saved request settings
7939 *==========================================================================*/
7940camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
7941        const CameraMetadata &jpegMetadata,
7942        camera3_capture_request_t *request)
7943{
7944    camera_metadata_t *resultMetadata;
7945    CameraMetadata camMetadata;
7946    camMetadata = request->settings;
7947
7948    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7949        int32_t thumbnail_size[2];
7950        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7951        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7952        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
7953                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7954    }
7955
7956    resultMetadata = camMetadata.release();
7957    return resultMetadata;
7958}
7959
7960/*===========================================================================
7961 * FUNCTION   : setHalFpsRange
7962 *
7963 * DESCRIPTION: set FPS range parameter
7964 *
7965 *
7966 * PARAMETERS :
7967 *   @settings    : Metadata from framework
7968 *   @hal_metadata: Metadata buffer
7969 *
7970 *
7971 * RETURN     : success: NO_ERROR
7972 *              failure:
7973 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // 'settings' -- translateToHalMetadata checks exists() before calling
    // here; confirm for any new call sites.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Default the video fps range to the AE target range; overridden below in
    // constrained high-speed (HFR) mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batch mode is off unless the HFR path below enables it.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Constrained HFR: lock sensor and video min fps to the requested max
        // so the sensor runs at a fixed rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map max fps onto the HFR mode enum; max_fps is implicitly truncated
        // from float to int by lookupHalName's parameter type.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                // The restart check must read mHFRVideoFps BEFORE it is
                // overwritten with the new fps below.
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Derive batch size from the video/preview fps ratio, clamped
                // to the backend maximum.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
8067
8068/*===========================================================================
8069 * FUNCTION   : translateToHalMetadata
8070 *
8071 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
8072 *
8073 *
8074 * PARAMETERS :
8075 *   @request  : request sent from framework
8076 *
8077 *
8078 * RETURN     : success: NO_ERROR
8079 *              failure:
8080 *==========================================================================*/
8081int QCamera3HardwareInterface::translateToHalMetadata
8082                                  (const camera3_capture_request_t *request,
8083                                   metadata_buffer_t *hal_metadata,
8084                                   uint32_t snapshotStreamId)
8085{
8086    int rc = 0;
8087    CameraMetadata frame_settings;
8088    frame_settings = request->settings;
8089
8090    /* Do not change the order of the following list unless you know what you are
8091     * doing.
8092     * The order is laid out in such a way that parameters in the front of the table
8093     * may be used to override the parameters later in the table. Examples are:
8094     * 1. META_MODE should precede AEC/AWB/AF MODE
8095     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
8096     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
8097     * 4. Any mode should precede it's corresponding settings
8098     */
8099    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
8100        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
8101        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
8102            rc = BAD_VALUE;
8103        }
8104        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
8105        if (rc != NO_ERROR) {
8106            ALOGE("%s: extractSceneMode failed", __func__);
8107        }
8108    }
8109
8110    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8111        uint8_t fwk_aeMode =
8112            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8113        uint8_t aeMode;
8114        int32_t redeye;
8115
8116        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
8117            aeMode = CAM_AE_MODE_OFF;
8118        } else {
8119            aeMode = CAM_AE_MODE_ON;
8120        }
8121        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
8122            redeye = 1;
8123        } else {
8124            redeye = 0;
8125        }
8126
8127        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8128                fwk_aeMode);
8129        if (NAME_NOT_FOUND != val) {
8130            int32_t flashMode = (int32_t)val;
8131            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
8132        }
8133
8134        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
8135        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
8136            rc = BAD_VALUE;
8137        }
8138    }
8139
8140    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
8141        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
8142        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8143                fwk_whiteLevel);
8144        if (NAME_NOT_FOUND != val) {
8145            uint8_t whiteLevel = (uint8_t)val;
8146            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
8147                rc = BAD_VALUE;
8148            }
8149        }
8150    }
8151
8152    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
8153        uint8_t fwk_cacMode =
8154                frame_settings.find(
8155                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
8156        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8157                fwk_cacMode);
8158        if (NAME_NOT_FOUND != val) {
8159            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
8160            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
8161                rc = BAD_VALUE;
8162            }
8163        } else {
8164            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
8165        }
8166    }
8167
8168    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
8169        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
8170        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8171                fwk_focusMode);
8172        if (NAME_NOT_FOUND != val) {
8173            uint8_t focusMode = (uint8_t)val;
8174            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
8175                rc = BAD_VALUE;
8176            }
8177        }
8178    }
8179
8180    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
8181        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
8182        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
8183                focalDistance)) {
8184            rc = BAD_VALUE;
8185        }
8186    }
8187
8188    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
8189        uint8_t fwk_antibandingMode =
8190                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
8191        int val = lookupHalName(ANTIBANDING_MODES_MAP,
8192                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
8193        if (NAME_NOT_FOUND != val) {
8194            uint32_t hal_antibandingMode = (uint32_t)val;
8195            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
8196                    hal_antibandingMode)) {
8197                rc = BAD_VALUE;
8198            }
8199        }
8200    }
8201
8202    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
8203        int32_t expCompensation = frame_settings.find(
8204                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
8205        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
8206            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
8207        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
8208            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
8209        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
8210                expCompensation)) {
8211            rc = BAD_VALUE;
8212        }
8213    }
8214
8215    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
8216        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
8217        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
8218            rc = BAD_VALUE;
8219        }
8220    }
8221    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
8222        rc = setHalFpsRange(frame_settings, hal_metadata);
8223        if (rc != NO_ERROR) {
8224            ALOGE("%s: setHalFpsRange failed", __func__);
8225        }
8226    }
8227
8228    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
8229        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
8230        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
8231            rc = BAD_VALUE;
8232        }
8233    }
8234
8235    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
8236        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
8237        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8238                fwk_effectMode);
8239        if (NAME_NOT_FOUND != val) {
8240            uint8_t effectMode = (uint8_t)val;
8241            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
8242                rc = BAD_VALUE;
8243            }
8244        }
8245    }
8246
8247    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
8248        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
8249        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
8250                colorCorrectMode)) {
8251            rc = BAD_VALUE;
8252        }
8253    }
8254
8255    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
8256        cam_color_correct_gains_t colorCorrectGains;
8257        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
8258            colorCorrectGains.gains[i] =
8259                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
8260        }
8261        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
8262                colorCorrectGains)) {
8263            rc = BAD_VALUE;
8264        }
8265    }
8266
8267    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
8268        cam_color_correct_matrix_t colorCorrectTransform;
8269        cam_rational_type_t transform_elem;
8270        size_t num = 0;
8271        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
8272           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
8273              transform_elem.numerator =
8274                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
8275              transform_elem.denominator =
8276                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
8277              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
8278              num++;
8279           }
8280        }
8281        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
8282                colorCorrectTransform)) {
8283            rc = BAD_VALUE;
8284        }
8285    }
8286
8287    cam_trigger_t aecTrigger;
8288    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
8289    aecTrigger.trigger_id = -1;
8290    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
8291        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
8292        aecTrigger.trigger =
8293            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
8294        aecTrigger.trigger_id =
8295            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
8296        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
8297                aecTrigger)) {
8298            rc = BAD_VALUE;
8299        }
8300        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
8301                aecTrigger.trigger, aecTrigger.trigger_id);
8302    }
8303
8304    /*af_trigger must come with a trigger id*/
8305    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
8306        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
8307        cam_trigger_t af_trigger;
8308        af_trigger.trigger =
8309            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
8310        af_trigger.trigger_id =
8311            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
8312        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
8313            rc = BAD_VALUE;
8314        }
8315        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
8316                af_trigger.trigger, af_trigger.trigger_id);
8317    }
8318
8319    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
8320        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
8321        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
8322            rc = BAD_VALUE;
8323        }
8324    }
8325    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
8326        cam_edge_application_t edge_application;
8327        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
8328        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
8329            edge_application.sharpness = 0;
8330        } else {
8331            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
8332        }
8333        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
8334            rc = BAD_VALUE;
8335        }
8336    }
8337
8338    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8339        int32_t respectFlashMode = 1;
8340        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
8341            uint8_t fwk_aeMode =
8342                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
8343            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
8344                respectFlashMode = 0;
8345                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
8346                    __func__);
8347            }
8348        }
8349        if (respectFlashMode) {
8350            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8351                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8352            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
8353            // To check: CAM_INTF_META_FLASH_MODE usage
8354            if (NAME_NOT_FOUND != val) {
8355                uint8_t flashMode = (uint8_t)val;
8356                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
8357                    rc = BAD_VALUE;
8358                }
8359            }
8360        }
8361    }
8362
8363    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
8364        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
8365        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
8366            rc = BAD_VALUE;
8367        }
8368    }
8369
8370    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
8371        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
8372        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
8373                flashFiringTime)) {
8374            rc = BAD_VALUE;
8375        }
8376    }
8377
8378    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
8379        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
8380        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
8381                hotPixelMode)) {
8382            rc = BAD_VALUE;
8383        }
8384    }
8385
8386    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
8387        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
8388        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
8389                lensAperture)) {
8390            rc = BAD_VALUE;
8391        }
8392    }
8393
8394    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
8395        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
8396        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
8397                filterDensity)) {
8398            rc = BAD_VALUE;
8399        }
8400    }
8401
8402    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
8403        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
8404        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
8405                focalLength)) {
8406            rc = BAD_VALUE;
8407        }
8408    }
8409
8410    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
8411        uint8_t optStabMode =
8412                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
8413        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
8414                optStabMode)) {
8415            rc = BAD_VALUE;
8416        }
8417    }
8418
8419    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
8420        uint8_t videoStabMode =
8421                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
8422        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
8423                videoStabMode)) {
8424            rc = BAD_VALUE;
8425        }
8426    }
8427
8428
8429    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
8430        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
8431        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
8432                noiseRedMode)) {
8433            rc = BAD_VALUE;
8434        }
8435    }
8436
8437    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
8438        float reprocessEffectiveExposureFactor =
8439            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
8440        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
8441                reprocessEffectiveExposureFactor)) {
8442            rc = BAD_VALUE;
8443        }
8444    }
8445
8446    cam_crop_region_t scalerCropRegion;
8447    bool scalerCropSet = false;
8448    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
8449        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
8450        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
8451        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
8452        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
8453
8454        // Map coordinate system from active array to sensor output.
8455        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
8456                scalerCropRegion.width, scalerCropRegion.height);
8457
8458        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
8459                scalerCropRegion)) {
8460            rc = BAD_VALUE;
8461        }
8462        scalerCropSet = true;
8463    }
8464
8465    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
8466        int64_t sensorExpTime =
8467                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
8468        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
8469        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
8470                sensorExpTime)) {
8471            rc = BAD_VALUE;
8472        }
8473    }
8474
8475    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
8476        int64_t sensorFrameDuration =
8477                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
8478        int64_t minFrameDuration = getMinFrameDuration(request);
8479        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
8480        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
8481            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
8482        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
8483        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
8484                sensorFrameDuration)) {
8485            rc = BAD_VALUE;
8486        }
8487    }
8488
8489    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
8490        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
8491        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
8492                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
8493        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
8494                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
8495        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
8496        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
8497                sensorSensitivity)) {
8498            rc = BAD_VALUE;
8499        }
8500    }
8501
8502    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
8503        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
8504        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
8505            rc = BAD_VALUE;
8506        }
8507    }
8508
8509    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
8510        uint8_t fwk_facedetectMode =
8511                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
8512
8513        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
8514                fwk_facedetectMode);
8515
8516        if (NAME_NOT_FOUND != val) {
8517            uint8_t facedetectMode = (uint8_t)val;
8518            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
8519                    facedetectMode)) {
8520                rc = BAD_VALUE;
8521            }
8522        }
8523    }
8524
8525    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
8526        uint8_t histogramMode =
8527                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
8528        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
8529                histogramMode)) {
8530            rc = BAD_VALUE;
8531        }
8532    }
8533
8534    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
8535        uint8_t sharpnessMapMode =
8536                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
8537        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
8538                sharpnessMapMode)) {
8539            rc = BAD_VALUE;
8540        }
8541    }
8542
8543    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
8544        uint8_t tonemapMode =
8545                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
8546        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
8547            rc = BAD_VALUE;
8548        }
8549    }
8550    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
8551    /*All tonemap channels will have the same number of points*/
8552    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
8553        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
8554        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
8555        cam_rgb_tonemap_curves tonemapCurves;
8556        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
8557        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
8558            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
8559                    __func__, tonemapCurves.tonemap_points_cnt,
8560                    CAM_MAX_TONEMAP_CURVE_SIZE);
8561            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
8562        }
8563
8564        /* ch0 = G*/
8565        size_t point = 0;
8566        cam_tonemap_curve_t tonemapCurveGreen;
8567        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8568            for (size_t j = 0; j < 2; j++) {
8569               tonemapCurveGreen.tonemap_points[i][j] =
8570                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
8571               point++;
8572            }
8573        }
8574        tonemapCurves.curves[0] = tonemapCurveGreen;
8575
8576        /* ch 1 = B */
8577        point = 0;
8578        cam_tonemap_curve_t tonemapCurveBlue;
8579        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8580            for (size_t j = 0; j < 2; j++) {
8581               tonemapCurveBlue.tonemap_points[i][j] =
8582                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
8583               point++;
8584            }
8585        }
8586        tonemapCurves.curves[1] = tonemapCurveBlue;
8587
8588        /* ch 2 = R */
8589        point = 0;
8590        cam_tonemap_curve_t tonemapCurveRed;
8591        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
8592            for (size_t j = 0; j < 2; j++) {
8593               tonemapCurveRed.tonemap_points[i][j] =
8594                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
8595               point++;
8596            }
8597        }
8598        tonemapCurves.curves[2] = tonemapCurveRed;
8599
8600        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
8601                tonemapCurves)) {
8602            rc = BAD_VALUE;
8603        }
8604    }
8605
8606    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
8607        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
8608        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
8609                captureIntent)) {
8610            rc = BAD_VALUE;
8611        }
8612    }
8613
8614    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
8615        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
8616        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
8617                blackLevelLock)) {
8618            rc = BAD_VALUE;
8619        }
8620    }
8621
8622    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
8623        uint8_t lensShadingMapMode =
8624                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
8625        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
8626                lensShadingMapMode)) {
8627            rc = BAD_VALUE;
8628        }
8629    }
8630
8631    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
8632        cam_area_t roi;
8633        bool reset = true;
8634        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
8635
8636        // Map coordinate system from active array to sensor output.
8637        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8638                roi.rect.height);
8639
8640        if (scalerCropSet) {
8641            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8642        }
8643        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
8644            rc = BAD_VALUE;
8645        }
8646    }
8647
8648    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
8649        cam_area_t roi;
8650        bool reset = true;
8651        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
8652
8653        // Map coordinate system from active array to sensor output.
8654        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
8655                roi.rect.height);
8656
8657        if (scalerCropSet) {
8658            reset = resetIfNeededROI(&roi, &scalerCropRegion);
8659        }
8660        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
8661            rc = BAD_VALUE;
8662        }
8663    }
8664
8665    if (m_bIs4KVideo) {
8666        /* Override needed for Video template in case of 4K video */
8667        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8668                CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
8669            rc = BAD_VALUE;
8670        }
8671    } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
8672            frame_settings.exists(QCAMERA3_CDS_MODE)) {
8673        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
8674        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
8675            ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
8676        } else {
8677            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8678                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
8679                rc = BAD_VALUE;
8680            }
8681        }
8682    }
8683
8684    // TNR
8685    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
8686        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
8687        uint8_t b_TnrRequested = 0;
8688        cam_denoise_param_t tnr;
8689        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
8690        tnr.process_plates =
8691            (cam_denoise_process_type_t)frame_settings.find(
8692            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
8693        b_TnrRequested = tnr.denoise_enable;
8694        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
8695            rc = BAD_VALUE;
8696        }
8697    }
8698
8699    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
8700        int32_t fwk_testPatternMode =
8701                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
8702        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
8703                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
8704
8705        if (NAME_NOT_FOUND != testPatternMode) {
8706            cam_test_pattern_data_t testPatternData;
8707            memset(&testPatternData, 0, sizeof(testPatternData));
8708            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
8709            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
8710                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
8711                int32_t *fwk_testPatternData =
8712                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
8713                testPatternData.r = fwk_testPatternData[0];
8714                testPatternData.b = fwk_testPatternData[3];
8715                switch (gCamCapability[mCameraId]->color_arrangement) {
8716                    case CAM_FILTER_ARRANGEMENT_RGGB:
8717                    case CAM_FILTER_ARRANGEMENT_GRBG:
8718                        testPatternData.gr = fwk_testPatternData[1];
8719                        testPatternData.gb = fwk_testPatternData[2];
8720                        break;
8721                    case CAM_FILTER_ARRANGEMENT_GBRG:
8722                    case CAM_FILTER_ARRANGEMENT_BGGR:
8723                        testPatternData.gr = fwk_testPatternData[2];
8724                        testPatternData.gb = fwk_testPatternData[1];
8725                        break;
8726                    default:
8727                        ALOGE("%s: color arrangement %d is not supported", __func__,
8728                                gCamCapability[mCameraId]->color_arrangement);
8729                        break;
8730                }
8731            }
8732            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
8733                    testPatternData)) {
8734                rc = BAD_VALUE;
8735            }
8736        } else {
8737            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
8738                    fwk_testPatternMode);
8739        }
8740    }
8741
8742    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
8743        size_t count = 0;
8744        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
8745        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
8746                gps_coords.data.d, gps_coords.count, count);
8747        if (gps_coords.count != count) {
8748            rc = BAD_VALUE;
8749        }
8750    }
8751
8752    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
8753        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
8754        size_t count = 0;
8755        const char *gps_methods_src = (const char *)
8756                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
8757        memset(gps_methods, '\0', sizeof(gps_methods));
8758        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
8759        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
8760                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
8761        if (GPS_PROCESSING_METHOD_SIZE != count) {
8762            rc = BAD_VALUE;
8763        }
8764    }
8765
8766    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
8767        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
8768        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
8769                gps_timestamp)) {
8770            rc = BAD_VALUE;
8771        }
8772    }
8773
8774    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8775        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8776        cam_rotation_info_t rotation_info;
8777        if (orientation == 0) {
8778           rotation_info.rotation = ROTATE_0;
8779        } else if (orientation == 90) {
8780           rotation_info.rotation = ROTATE_90;
8781        } else if (orientation == 180) {
8782           rotation_info.rotation = ROTATE_180;
8783        } else if (orientation == 270) {
8784           rotation_info.rotation = ROTATE_270;
8785        }
8786        rotation_info.streamId = snapshotStreamId;
8787        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
8788        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
8789            rc = BAD_VALUE;
8790        }
8791    }
8792
8793    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
8794        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
8795        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
8796            rc = BAD_VALUE;
8797        }
8798    }
8799
8800    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
8801        uint32_t thumb_quality = (uint32_t)
8802                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
8803        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
8804                thumb_quality)) {
8805            rc = BAD_VALUE;
8806        }
8807    }
8808
8809    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8810        cam_dimension_t dim;
8811        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8812        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8813        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
8814            rc = BAD_VALUE;
8815        }
8816    }
8817
8818    // Internal metadata
8819    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
8820        size_t count = 0;
8821        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
8822        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
8823                privatedata.data.i32, privatedata.count, count);
8824        if (privatedata.count != count) {
8825            rc = BAD_VALUE;
8826        }
8827    }
8828
8829    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
8830        uint8_t* use_av_timer =
8831                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
8832        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
8833            rc = BAD_VALUE;
8834        }
8835    }
8836
8837    // EV step
8838    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
8839            gCamCapability[mCameraId]->exp_compensation_step)) {
8840        rc = BAD_VALUE;
8841    }
8842
8843    // CDS info
8844    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
8845        cam_cds_data_t *cdsData = (cam_cds_data_t *)
8846                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
8847
8848        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8849                CAM_INTF_META_CDS_DATA, *cdsData)) {
8850            rc = BAD_VALUE;
8851        }
8852    }
8853
8854    // Hybrid AE
8855    if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
8856        uint8_t *hybrid_ae = (uint8_t *)
8857                frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
8858
8859        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
8860                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
8861            rc = BAD_VALUE;
8862        }
8863    }
8864
8865    return rc;
8866}
8867
8868/*===========================================================================
8869 * FUNCTION   : captureResultCb
8870 *
8871 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
8872 *
8873 * PARAMETERS :
8874 *   @frame  : frame information from mm-camera-interface
8875 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
8876 *   @userdata: userdata
8877 *
8878 * RETURN     : NONE
8879 *==========================================================================*/
8880void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
8881                camera3_stream_buffer_t *buffer,
8882                uint32_t frame_number, bool isInputBuffer, void *userdata)
8883{
8884    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
8885    if (hw == NULL) {
8886        ALOGE("%s: Invalid hw %p", __func__, hw);
8887        return;
8888    }
8889
8890    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
8891    return;
8892}
8893
8894
8895/*===========================================================================
8896 * FUNCTION   : initialize
8897 *
8898 * DESCRIPTION: Pass framework callback pointers to HAL
8899 *
8900 * PARAMETERS :
8901 *
8902 *
8903 * RETURN     : Success : 0
8904 *              Failure: -ENODEV
8905 *==========================================================================*/
8906
8907int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
8908                                  const camera3_callback_ops_t *callback_ops)
8909{
8910    CDBG("%s: E", __func__);
8911    QCamera3HardwareInterface *hw =
8912        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8913    if (!hw) {
8914        ALOGE("%s: NULL camera device", __func__);
8915        return -ENODEV;
8916    }
8917
8918    int rc = hw->initialize(callback_ops);
8919    CDBG("%s: X", __func__);
8920    return rc;
8921}
8922
8923/*===========================================================================
8924 * FUNCTION   : configure_streams
8925 *
 * DESCRIPTION: Entry point wrapper that forwards the requested stream
 *              configuration to the HAL instance (configureStreams)
8927 *
8928 * PARAMETERS :
8929 *
8930 *
8931 * RETURN     : Success: 0
8932 *              Failure: -EINVAL (if stream configuration is invalid)
8933 *                       -ENODEV (fatal error)
8934 *==========================================================================*/
8935
8936int QCamera3HardwareInterface::configure_streams(
8937        const struct camera3_device *device,
8938        camera3_stream_configuration_t *stream_list)
8939{
8940    CDBG("%s: E", __func__);
8941    QCamera3HardwareInterface *hw =
8942        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8943    if (!hw) {
8944        ALOGE("%s: NULL camera device", __func__);
8945        return -ENODEV;
8946    }
8947    int rc = hw->configureStreams(stream_list);
8948    CDBG("%s: X", __func__);
8949    return rc;
8950}
8951
8952/*===========================================================================
8953 * FUNCTION   : construct_default_request_settings
8954 *
8955 * DESCRIPTION: Configure a settings buffer to meet the required use case
8956 *
8957 * PARAMETERS :
8958 *
8959 *
8960 * RETURN     : Success: Return valid metadata
8961 *              Failure: Return NULL
8962 *==========================================================================*/
8963const camera_metadata_t* QCamera3HardwareInterface::
8964    construct_default_request_settings(const struct camera3_device *device,
8965                                        int type)
8966{
8967
8968    CDBG("%s: E", __func__);
8969    camera_metadata_t* fwk_metadata = NULL;
8970    QCamera3HardwareInterface *hw =
8971        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
8972    if (!hw) {
8973        ALOGE("%s: NULL camera device", __func__);
8974        return NULL;
8975    }
8976
8977    fwk_metadata = hw->translateCapabilityToMetadata(type);
8978
8979    CDBG("%s: X", __func__);
8980    return fwk_metadata;
8981}
8982
8983/*===========================================================================
8984 * FUNCTION   : process_capture_request
8985 *
 * DESCRIPTION: Entry point wrapper that forwards a single capture request
 *              to the HAL instance (processCaptureRequest)
8987 *
8988 * PARAMETERS :
8989 *
8990 *
8991 * RETURN     :
8992 *==========================================================================*/
8993int QCamera3HardwareInterface::process_capture_request(
8994                    const struct camera3_device *device,
8995                    camera3_capture_request_t *request)
8996{
8997    CDBG("%s: E", __func__);
8998    QCamera3HardwareInterface *hw =
8999        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9000    if (!hw) {
9001        ALOGE("%s: NULL camera device", __func__);
9002        return -EINVAL;
9003    }
9004
9005    int rc = hw->processCaptureRequest(request);
9006    CDBG("%s: X", __func__);
9007    return rc;
9008}
9009
9010/*===========================================================================
9011 * FUNCTION   : dump
9012 *
9013 * DESCRIPTION:
9014 *
9015 * PARAMETERS :
9016 *
9017 *
9018 * RETURN     :
9019 *==========================================================================*/
9020
9021void QCamera3HardwareInterface::dump(
9022                const struct camera3_device *device, int fd)
9023{
9024    /* Log level property is read when "adb shell dumpsys media.camera" is
9025       called so that the log level can be controlled without restarting
9026       the media server */
9027    getLogLevel();
9028
9029    CDBG("%s: E", __func__);
9030    QCamera3HardwareInterface *hw =
9031        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9032    if (!hw) {
9033        ALOGE("%s: NULL camera device", __func__);
9034        return;
9035    }
9036
9037    hw->dump(fd);
9038    CDBG("%s: X", __func__);
9039    return;
9040}
9041
9042/*===========================================================================
9043 * FUNCTION   : flush
9044 *
9045 * DESCRIPTION:
9046 *
9047 * PARAMETERS :
9048 *
9049 *
9050 * RETURN     :
9051 *==========================================================================*/
9052
9053int QCamera3HardwareInterface::flush(
9054                const struct camera3_device *device)
9055{
9056    int rc;
9057    CDBG("%s: E", __func__);
9058    QCamera3HardwareInterface *hw =
9059        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9060    if (!hw) {
9061        ALOGE("%s: NULL camera device", __func__);
9062        return -EINVAL;
9063    }
9064
9065    rc = hw->flush();
9066    CDBG("%s: X", __func__);
9067    return rc;
9068}
9069
9070/*===========================================================================
9071 * FUNCTION   : close_camera_device
9072 *
9073 * DESCRIPTION:
9074 *
9075 * PARAMETERS :
9076 *
9077 *
9078 * RETURN     :
9079 *==========================================================================*/
9080int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
9081{
9082    CDBG("%s: E", __func__);
9083    int ret = NO_ERROR;
9084    QCamera3HardwareInterface *hw =
9085        reinterpret_cast<QCamera3HardwareInterface *>(
9086            reinterpret_cast<camera3_device_t *>(device)->priv);
9087    if (!hw) {
9088        ALOGE("NULL camera device");
9089        return BAD_VALUE;
9090    }
9091    delete hw;
9092
9093    CDBG("%s: X", __func__);
9094    return ret;
9095}
9096
9097/*===========================================================================
9098 * FUNCTION   : getWaveletDenoiseProcessPlate
9099 *
9100 * DESCRIPTION: query wavelet denoise process plate
9101 *
9102 * PARAMETERS : None
9103 *
9104 * RETURN     : WNR prcocess plate value
9105 *==========================================================================*/
9106cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
9107{
9108    char prop[PROPERTY_VALUE_MAX];
9109    memset(prop, 0, sizeof(prop));
9110    property_get("persist.denoise.process.plates", prop, "0");
9111    int processPlate = atoi(prop);
9112    switch(processPlate) {
9113    case 0:
9114        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9115    case 1:
9116        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9117    case 2:
9118        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9119    case 3:
9120        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9121    default:
9122        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9123    }
9124}
9125
9126
9127/*===========================================================================
9128 * FUNCTION   : getTemporalDenoiseProcessPlate
9129 *
9130 * DESCRIPTION: query temporal denoise process plate
9131 *
9132 * PARAMETERS : None
9133 *
9134 * RETURN     : TNR prcocess plate value
9135 *==========================================================================*/
9136cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
9137{
9138    char prop[PROPERTY_VALUE_MAX];
9139    memset(prop, 0, sizeof(prop));
9140    property_get("persist.tnr.process.plates", prop, "0");
9141    int processPlate = atoi(prop);
9142    switch(processPlate) {
9143    case 0:
9144        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
9145    case 1:
9146        return CAM_WAVELET_DENOISE_CBCR_ONLY;
9147    case 2:
9148        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9149    case 3:
9150        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
9151    default:
9152        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
9153    }
9154}
9155
9156
9157/*===========================================================================
9158 * FUNCTION   : extractSceneMode
9159 *
9160 * DESCRIPTION: Extract scene mode from frameworks set metadata
9161 *
9162 * PARAMETERS :
9163 *      @frame_settings: CameraMetadata reference
9164 *      @metaMode: ANDROID_CONTORL_MODE
9165 *      @hal_metadata: hal metadata structure
9166 *
9167 * RETURN     : None
9168 *==========================================================================*/
9169int32_t QCamera3HardwareInterface::extractSceneMode(
9170        const CameraMetadata &frame_settings, uint8_t metaMode,
9171        metadata_buffer_t *hal_metadata)
9172{
9173    int32_t rc = NO_ERROR;
9174
9175    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
9176        camera_metadata_ro_entry entry =
9177                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
9178        if (0 == entry.count)
9179            return rc;
9180
9181        uint8_t fwk_sceneMode = entry.data.u8[0];
9182
9183        int val = lookupHalName(SCENE_MODES_MAP,
9184                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
9185                fwk_sceneMode);
9186        if (NAME_NOT_FOUND != val) {
9187            uint8_t sceneMode = (uint8_t)val;
9188            CDBG("%s: sceneMode: %d", __func__, sceneMode);
9189            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9190                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9191                rc = BAD_VALUE;
9192            }
9193        }
9194    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
9195            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
9196        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
9197        CDBG("%s: sceneMode: %d", __func__, sceneMode);
9198        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9199                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
9200            rc = BAD_VALUE;
9201        }
9202    }
9203    return rc;
9204}
9205
9206/*===========================================================================
9207 * FUNCTION   : needRotationReprocess
9208 *
9209 * DESCRIPTION: if rotation needs to be done by reprocess in pp
9210 *
9211 * PARAMETERS : none
9212 *
9213 * RETURN     : true: needed
9214 *              false: no need
9215 *==========================================================================*/
9216bool QCamera3HardwareInterface::needRotationReprocess()
9217{
9218    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
9219        // current rotation is not zero, and pp has the capability to process rotation
9220        CDBG_HIGH("%s: need do reprocess for rotation", __func__);
9221        return true;
9222    }
9223
9224    return false;
9225}
9226
9227/*===========================================================================
9228 * FUNCTION   : needReprocess
9229 *
9230 * DESCRIPTION: if reprocess in needed
9231 *
9232 * PARAMETERS : none
9233 *
9234 * RETURN     : true: needed
9235 *              false: no need
9236 *==========================================================================*/
9237bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
9238{
9239    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
9240        // TODO: add for ZSL HDR later
9241        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
9242        if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
9243            CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
9244            return true;
9245        } else {
9246            CDBG_HIGH("%s: already post processed frame", __func__);
9247            return false;
9248        }
9249    }
9250    return needRotationReprocess();
9251}
9252
9253/*===========================================================================
9254 * FUNCTION   : needJpegRotation
9255 *
9256 * DESCRIPTION: if rotation from jpeg is needed
9257 *
9258 * PARAMETERS : none
9259 *
9260 * RETURN     : true: needed
9261 *              false: no need
9262 *==========================================================================*/
9263bool QCamera3HardwareInterface::needJpegRotation()
9264{
9265   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
9266    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
9267       CDBG("%s: Need Jpeg to do the rotation", __func__);
9268       return true;
9269    }
9270    return false;
9271}
9272
9273/*===========================================================================
9274 * FUNCTION   : addOfflineReprocChannel
9275 *
9276 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
9277 *              coming from input channel
9278 *
9279 * PARAMETERS :
9280 *   @config  : reprocess configuration
9281 *   @inputChHandle : pointer to the input (source) channel
9282 *
9283 *
9284 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
9285 *==========================================================================*/
9286QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
9287        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
9288{
9289    int32_t rc = NO_ERROR;
9290    QCamera3ReprocessChannel *pChannel = NULL;
9291
9292    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
9293            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
9294            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
9295    if (NULL == pChannel) {
9296        ALOGE("%s: no mem for reprocess channel", __func__);
9297        return NULL;
9298    }
9299
9300    rc = pChannel->initialize(IS_TYPE_NONE);
9301    if (rc != NO_ERROR) {
9302        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
9303        delete pChannel;
9304        return NULL;
9305    }
9306
9307    // pp feature config
9308    cam_pp_feature_config_t pp_config;
9309    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
9310
9311    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
9312
9313    rc = pChannel->addReprocStreamsFromSource(pp_config,
9314            config,
9315            IS_TYPE_NONE,
9316            mMetadataChannel);
9317
9318    if (rc != NO_ERROR) {
9319        delete pChannel;
9320        return NULL;
9321    }
9322    return pChannel;
9323}
9324
9325/*===========================================================================
9326 * FUNCTION   : getMobicatMask
9327 *
9328 * DESCRIPTION: returns mobicat mask
9329 *
9330 * PARAMETERS : none
9331 *
9332 * RETURN     : mobicat mask
9333 *
9334 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Simple accessor; m_MobicatMask is updated by setMobicat() from the
    // "persist.camera.mobicat" property.
    return m_MobicatMask;
}
9339
9340/*===========================================================================
9341 * FUNCTION   : setMobicat
9342 *
9343 * DESCRIPTION: set Mobicat on/off.
9344 *
9345 * PARAMETERS :
9346 *   @params  : none
9347 *
9348 * RETURN     : int32_t type of status
9349 *              NO_ERROR  -- success
9350 *              none-zero failure code
9351 *==========================================================================*/
9352int32_t QCamera3HardwareInterface::setMobicat()
9353{
9354    char value [PROPERTY_VALUE_MAX];
9355    property_get("persist.camera.mobicat", value, "0");
9356    int32_t ret = NO_ERROR;
9357    uint8_t enableMobi = (uint8_t)atoi(value);
9358
9359    if (enableMobi) {
9360        tune_cmd_t tune_cmd;
9361        tune_cmd.type = SET_RELOAD_CHROMATIX;
9362        tune_cmd.module = MODULE_ALL;
9363        tune_cmd.value = TRUE;
9364        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9365                CAM_INTF_PARM_SET_VFE_COMMAND,
9366                tune_cmd);
9367
9368        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
9369                CAM_INTF_PARM_SET_PP_COMMAND,
9370                tune_cmd);
9371    }
9372    m_MobicatMask = enableMobi;
9373
9374    return ret;
9375}
9376
9377/*===========================================================================
9378* FUNCTION   : getLogLevel
9379*
9380* DESCRIPTION: Reads the log level property into a variable
9381*
9382* PARAMETERS :
9383*   None
9384*
9385* RETURN     :
9386*   None
9387*==========================================================================*/
9388void QCamera3HardwareInterface::getLogLevel()
9389{
9390    char prop[PROPERTY_VALUE_MAX];
9391    uint32_t globalLogLevel = 0;
9392
9393    property_get("persist.camera.hal.debug", prop, "0");
9394    int val = atoi(prop);
9395    if (0 <= val) {
9396        gCamHal3LogLevel = (uint32_t)val;
9397    }
9398    property_get("persist.camera.global.debug", prop, "0");
9399    val = atoi(prop);
9400    if (0 <= val) {
9401        globalLogLevel = (uint32_t)val;
9402    }
9403
9404    /* Highest log level among hal.logs and global.logs is selected */
9405    if (gCamHal3LogLevel < globalLogLevel)
9406        gCamHal3LogLevel = globalLogLevel;
9407
9408    return;
9409}
9410
9411/*===========================================================================
9412 * FUNCTION   : validateStreamRotations
9413 *
9414 * DESCRIPTION: Check if the rotations requested are supported
9415 *
9416 * PARAMETERS :
9417 *   @stream_list : streams to be configured
9418 *
9419 * RETURN     : NO_ERROR on success
9420 *              -EINVAL on failure
9421 *
9422 *==========================================================================*/
9423int QCamera3HardwareInterface::validateStreamRotations(
9424        camera3_stream_configuration_t *streamList)
9425{
9426    int rc = NO_ERROR;
9427
9428    /*
9429    * Loop through all streams requested in configuration
9430    * Check if unsupported rotations have been requested on any of them
9431    */
9432    for (size_t j = 0; j < streamList->num_streams; j++){
9433        camera3_stream_t *newStream = streamList->streams[j];
9434
9435        switch(newStream->rotation) {
9436            case CAMERA3_STREAM_ROTATION_0:
9437            case CAMERA3_STREAM_ROTATION_90:
9438            case CAMERA3_STREAM_ROTATION_180:
9439            case CAMERA3_STREAM_ROTATION_270:
9440                //Expected values
9441                break;
9442            default:
9443                ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
9444                        "type:%d and stream format:%d", __func__,
9445                        newStream->rotation, newStream->stream_type,
9446                        newStream->format);
9447                return -EINVAL;
9448        }
9449
9450        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
9451        bool isImplDef = (newStream->format ==
9452                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
9453        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
9454                isImplDef);
9455
9456        if (isRotated && (!isImplDef || isZsl)) {
9457            ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
9458                    "type:%d and stream format:%d", __func__,
9459                    newStream->rotation, newStream->stream_type,
9460                    newStream->format);
9461            rc = -EINVAL;
9462            break;
9463        }
9464    }
9465    return rc;
9466}
9467
9468/*===========================================================================
9469* FUNCTION   : getFlashInfo
9470*
9471* DESCRIPTION: Retrieve information about whether the device has a flash.
9472*
9473* PARAMETERS :
9474*   @cameraId  : Camera id to query
9475*   @hasFlash  : Boolean indicating whether there is a flash device
9476*                associated with given camera
9477*   @flashNode : If a flash device exists, this will be its device node.
9478*
9479* RETURN     :
9480*   None
9481*==========================================================================*/
9482void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
9483        bool& hasFlash,
9484        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
9485{
9486    cam_capability_t* camCapability = gCamCapability[cameraId];
9487    if (NULL == camCapability) {
9488        hasFlash = false;
9489        flashNode[0] = '\0';
9490    } else {
9491        hasFlash = camCapability->flash_available;
9492        strlcpy(flashNode,
9493                (char*)camCapability->flash_dev_name,
9494                QCAMERA_MAX_FILEPATH_LENGTH);
9495    }
9496}
9497
9498/*===========================================================================
9499* FUNCTION   : getEepromVersionInfo
9500*
9501* DESCRIPTION: Retrieve version info of the sensor EEPROM data
9502*
9503* PARAMETERS : None
9504*
9505* RETURN     : string describing EEPROM version
9506*              "\0" if no such info available
9507*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into the capability table's EEPROM version buffer;
    // the string lives as long as gCamCapability does.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
9512
9513/*===========================================================================
9514* FUNCTION   : getLdafCalib
9515*
9516* DESCRIPTION: Retrieve Laser AF calibration data
9517*
9518* PARAMETERS : None
9519*
9520* RETURN     : Two uint32_t describing laser AF calibration data
9521*              NULL if none is available.
9522*==========================================================================*/
9523const uint32_t *QCamera3HardwareInterface::getLdafCalib()
9524{
9525    if (mLdafCalibExist) {
9526        return &mLdafCalib[0];
9527    } else {
9528        return NULL;
9529    }
9530}
9531
9532/*===========================================================================
9533 * FUNCTION   : dynamicUpdateMetaStreamInfo
9534 *
9535 * DESCRIPTION: This function:
9536 *             (1) stops all the channels
9537 *             (2) returns error on pending requests and buffers
9538 *             (3) sends metastream_info in setparams
9539 *             (4) starts all channels
9540 *             This is useful when sensor has to be restarted to apply any
9541 *             settings such as frame rate from a different sensor mode
9542 *
9543 * PARAMETERS : None
9544 *
9545 * RETURN     : NO_ERROR on success
9546 *              Error codes on failure
9547 *
9548 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Step 1: stream-off everything so the sensor mode can change.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Step 2: fail out any in-flight requests/buffers; the framework will
    // resubmit after the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Non-fatal: continue and restart channels with the old sensor mode.
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // Step 4: stream everything back on.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
9588
9589/*===========================================================================
9590 * FUNCTION   : stopAllChannels
9591 *
9592 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
9593 *
9594 * PARAMETERS : None
9595 *
9596 * RETURN     : NO_ERROR on success
9597 *              Error codes on failure
9598 *
9599 *==========================================================================*/
9600int32_t QCamera3HardwareInterface::stopAllChannels()
9601{
9602    int32_t rc = NO_ERROR;
9603
9604    // Stop the Streams/Channels
9605    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9606        it != mStreamInfo.end(); it++) {
9607        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9608        if (channel != nullptr) {
9609            channel->stop();
9610        }
9611        (*it)->status = INVALID;
9612    }
9613
9614    if (mSupportChannel) {
9615        mSupportChannel->stop();
9616    }
9617    if (mAnalysisChannel) {
9618        mAnalysisChannel->stop();
9619    }
9620    if (mRawDumpChannel) {
9621        mRawDumpChannel->stop();
9622    }
9623    if (mMetadataChannel) {
9624        /* If content of mStreamInfo is not 0, there is metadata stream */
9625        mMetadataChannel->stop();
9626    }
9627
9628    CDBG("%s:%d All channels stopped", __func__, __LINE__);
9629    return rc;
9630}
9631
9632/*===========================================================================
9633 * FUNCTION   : startAllChannels
9634 *
9635 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
9636 *
9637 * PARAMETERS : None
9638 *
9639 * RETURN     : NO_ERROR on success
9640 *              Error codes on failure
9641 *
9642 *==========================================================================*/
9643int32_t QCamera3HardwareInterface::startAllChannels()
9644{
9645    int32_t rc = NO_ERROR;
9646
9647    CDBG("%s: Start all channels ", __func__);
9648    // Start the Streams/Channels
9649    if (mMetadataChannel) {
9650        /* If content of mStreamInfo is not 0, there is metadata stream */
9651        rc = mMetadataChannel->start();
9652        if (rc < 0) {
9653            ALOGE("%s: META channel start failed", __func__);
9654            return rc;
9655        }
9656    }
9657    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9658        it != mStreamInfo.end(); it++) {
9659        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9660        rc = channel->start();
9661        if (rc < 0) {
9662            ALOGE("%s: channel start failed", __func__);
9663            return rc;
9664        }
9665    }
9666    if (mAnalysisChannel) {
9667        mAnalysisChannel->start();
9668    }
9669    if (mSupportChannel) {
9670        rc = mSupportChannel->start();
9671        if (rc < 0) {
9672            ALOGE("%s: Support channel start failed", __func__);
9673            return rc;
9674        }
9675    }
9676    if (mRawDumpChannel) {
9677        rc = mRawDumpChannel->start();
9678        if (rc < 0) {
9679            ALOGE("%s: RAW dump channel start failed", __func__);
9680            return rc;
9681        }
9682    }
9683
9684    CDBG("%s:%d All channels started", __func__, __LINE__);
9685    return rc;
9686}
9687
9688/*===========================================================================
9689 * FUNCTION   : notifyErrorForPendingRequests
9690 *
9691 * DESCRIPTION: This function sends error for all the pending requests/buffers
9692 *
9693 * PARAMETERS : None
9694 *
9695 * RETURN     : Error codes
9696 *              NO_ERROR on success
9697 *
9698 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest pending request's frame number. Buffers older than
    // this belong to requests whose result metadata was already sent, so
    // they only need ERROR_BUFFER notifications; buffers at or after it
    // are handled below with ERROR_REQUEST notifications instead.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
      __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            // Group buffers by frame number; create a new vector on first
            // sight of a frame, append otherwise.
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            // Buffer is consumed from the pending map; erase returns the
            // next iterator so no increment here.
            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
          __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            // One ERROR_BUFFER notify per buffer, then the buffer itself is
            // returned in error state via process_capture_result below.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        // Return all of this frame's buffers in one capture result with no
        // metadata (result.result == NULL).
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Second pass: everything still in the pending buffer list belongs to
    // requests whose metadata was NOT sent yet; regroup them by frame.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
              __func__, frame_number);

        // Send shutter notify to frameworks
        // NOTE(review): message type is actually ERROR_REQUEST, not shutter;
        // comment kept from original — confirm intent with git history.
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        // NOTE(review): assumes mPendingRequestsList iterates in the same
        // frame-number order as flushMap keys so that i matches frame_number
        // — confirm; also that flushMap is non-empty only when requests exist.
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
9866
9867bool QCamera3HardwareInterface::isOnEncoder(
9868        const cam_dimension_t max_viewfinder_size,
9869        uint32_t width, uint32_t height)
9870{
9871    return (width > (uint32_t)max_viewfinder_size.width ||
9872            height > (uint32_t)max_viewfinder_size.height);
9873}
9874
9875/*===========================================================================
9876 * FUNCTION   : setBundleInfo
9877 *
9878 * DESCRIPTION: Set bundle info for all streams that are bundle.
9879 *
9880 * PARAMETERS : None
9881 *
9882 * RETURN     : NO_ERROR on success
9883 *              Error codes on failure
9884 *==========================================================================*/
9885int32_t QCamera3HardwareInterface::setBundleInfo()
9886{
9887    int32_t rc = NO_ERROR;
9888
9889    if (mChannelHandle) {
9890        cam_bundle_config_t bundleInfo;
9891        memset(&bundleInfo, 0, sizeof(bundleInfo));
9892        rc = mCameraHandle->ops->get_bundle_info(
9893                mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
9894        if (rc != NO_ERROR) {
9895            ALOGE("%s: get_bundle_info failed", __func__);
9896            return rc;
9897        }
9898        if (mAnalysisChannel) {
9899            mAnalysisChannel->setBundleInfo(bundleInfo);
9900        }
9901        if (mSupportChannel) {
9902            mSupportChannel->setBundleInfo(bundleInfo);
9903        }
9904        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
9905                it != mStreamInfo.end(); it++) {
9906            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
9907            channel->setBundleInfo(bundleInfo);
9908        }
9909        if (mRawDumpChannel) {
9910            mRawDumpChannel->setBundleInfo(bundleInfo);
9911        }
9912    }
9913
9914    return rc;
9915}
9916
9917}; //end namespace qcamera
9918